comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
@moderakh No, I did not change it. Just point out that V2 and .Net SDK does not have that specific check. We added the eventual check in this PR: https://github.com/Azure/azure-sdk-for-java/pull/16111/files | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} |
we don't include session token for eventual consistency on purpose the intention is to bypass using SessionContainer. Not using that helps to avoid unnecessary perf hit. V2 and DotNet SDK as you said do not have this optimization. we gain 5-10% perf improvement by not accessing SessionContainer when not needed. see this PR for more info when we introduced this optimization: https://github.com/Azure/azure-sdk-for-java/pull/8064 | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
    // Sentinel used when the HTTP response carries no body.
    private final static byte[] EMPTY_BYTE_ARRAY = {};
    private final DiagnosticsClientContext clientContext;
    private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
    // Headers attached to every outgoing request unless overridden per request.
    private final Map<String, String> defaultHeaders;
    private final HttpClient httpClient;
    // Controls the wire content type used for queries (raw SQL text vs. JSON query envelope).
    private final QueryCompatibilityMode queryCompatibilityMode;
    // Resolves the regional service endpoint for each request.
    private final GlobalEndpointManager globalEndpointManager;
    // Account-level default consistency; drives session-token handling in applySessionToken.
    private ConsistencyLevel defaultConsistencyLevel;
    // Tracks session tokens so session consistency can be honored across requests.
    private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
    // Thin per-operation wrappers: each logical operation maps to its HTTP verb and
    // funnels through performRequest.
    private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.POST);
    }
    private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.PATCH);
    }
    private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.POST);
    }
    private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.GET);
    }
    private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.PUT);
    }
    private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.DELETE);
    }
    private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.POST);
    }
    private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
        return this.performRequest(request, HttpMethod.GET);
    }
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
    /**
     * Issues the HTTP call for the given request; the call is made lazily, upon
     * subscription, and emits a single RxDocumentServiceResponse.
     *
     * @param request the service request to send
     * @param method the HTTP method to use
     * @return {@link Mono} of RxDocumentServiceResponse (note: not a Flux — exactly one item)
     */
    public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
        try {
            if (request.requestContext.cosmosDiagnostics == null) {
                request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
            }
            // Resolve the target endpoint and remember it for diagnostics/error reporting.
            URI uri = getUri(request);
            request.requestContext.resourcePhysicalAddress = uri.toString();
            HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
            Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
            HttpRequest httpRequest = new HttpRequest(method,
                uri,
                uri.getPort(),
                httpHeaders,
                contentAsByteArray);
            // Query-plan and address-refresh calls get their own (shorter) timeouts.
            Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
            if (OperationType.QueryPlan.equals(request.getOperationType())) {
                responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
            } else if (request.isAddressRefresh()) {
                responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
            }
            Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
            return toDocumentServiceResponse(httpResponseMono, request);
        } catch (Exception e) {
            // Surface synchronous setup failures through the reactive channel.
            return Mono.error(e);
        }
    }
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
    // Dispatches the request to the verb-specific helper based on its operation type.
    private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
        switch (request.getOperationType()) {
            // Batch intentionally shares the Create path (both POST).
            case Create:
            case Batch:
                return this.create(request);
            case Patch:
                return this.patch(request);
            case Upsert:
                return this.upsert(request);
            case Delete:
                return this.delete(request);
            case ExecuteJavaScript:
                return this.execute(request);
            case Read:
                return this.read(request);
            case ReadFeed:
                return this.readFeed(request);
            case Replace:
                return this.replace(request);
            // All query flavors (including the plan-only request) go through query().
            case SqlQuery:
            case Query:
            case QueryPlan:
                return this.query(request);
            default:
                throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
        }
    }
    /**
     * Executes the request under the web-exception retry policy; the Callable re-runs
     * the whole dispatch pipeline on each retry attempt.
     */
    private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
        Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
        return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
    }
    /**
     * Entry point for the gateway store model: applies the session token to the request,
     * invokes it with retries, and captures the session token from the response.
     *
     * On failure, the token is still captured for non-master resources when the error is
     * 412 (precondition failed), 409 (conflict), or a 404 that is NOT a
     * read-session-not-available miss — those responses carry usable session state.
     */
    @Override
    public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
        this.applySessionToken(request);
        Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
        return responseObs.onErrorResume(
            e -> {
                CosmosException dce = Utils.as(e, CosmosException.class);
                if (dce == null) {
                    // Non-Cosmos failures are passed through unchanged.
                    logger.error("unexpected failure {}", e.getMessage(), e);
                    return Mono.error(e);
                }
                if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
                    (dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
                        dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                        (
                            dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
                                !Exceptions.isSubStatusCode(dce,
                                    HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
                    this.captureSessionToken(request, dce.getResponseHeaders());
                }
                return Mono.error(dce);
            }
        ).map(response ->
            {
                // Success path: always record the latest session token.
                this.captureSessionToken(request, response.getResponseHeaders());
                return response;
            }
        );
    }
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} |
wow I see, thanks @moderakh | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
 * Given the request, creates a Mono which upon subscription issues the HTTP
 * call and emits one RxDocumentServiceResponse.
 *
 * @param request the service request to send
 * @param method the HTTP verb to use
 * @return Mono&lt;RxDocumentServiceResponse&gt;
 */
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
    try {
        // Ensure a diagnostics context exists so timings and metadata can be recorded.
        if (request.requestContext.cosmosDiagnostics == null) {
            request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
        }

        URI uri = getUri(request);
        request.requestContext.resourcePhysicalAddress = uri.toString();
        HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());

        Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();

        HttpRequest httpRequest = new HttpRequest(method,
            uri,
            uri.getPort(),
            httpHeaders,
            contentAsByteArray);

        // Query-plan and address-refresh requests get their own dedicated
        // response timeouts; everything else uses the general HTTP timeout.
        Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
        if (OperationType.QueryPlan.equals(request.getOperationType())) {
            responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
        } else if (request.isAddressRefresh()) {
            responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
        }

        Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);

        return toDocumentServiceResponse(httpResponseMono, request);
    } catch (Exception e) {
        // Surface synchronous construction failures (e.g. URI errors) through
        // the reactive error channel instead of throwing to the caller.
        return Mono.error(e);
    }
}
/**
 * Builds the outgoing HTTP headers by merging the client's default headers
 * with the request-specific headers; request headers take precedence.
 *
 * @param headers request-specific headers; may be null or empty
 * @return the merged {@link HttpHeaders}
 */
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
    HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());

    // BUGFIX: the original dereferenced 'headers' (containsKey) in the first
    // loop BEFORE its null check further down, so a null map would NPE there.
    // Treat a null map uniformly as "no request-specific headers".
    if (headers == null) {
        for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
            httpHeaders.set(entry.getKey(), entry.getValue());
        }
        return httpHeaders;
    }

    // Copy only the defaults that the request does not override.
    for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
        if (!headers.containsKey(entry.getKey())) {
            httpHeaders.set(entry.getKey(), entry.getValue());
        }
    }

    // Request headers win; preserve the original null-value -> empty-string mapping.
    for (Entry<String, String> entry : headers.entrySet()) {
        if (entry.getValue() == null) {
            httpHeaders.set(entry.getKey(), "");
        } else {
            httpHeaders.set(entry.getKey(), entry.getValue());
        }
    }
    return httpHeaders;
}
/**
 * Resolves the target URI for the request: the request's endpoint override
 * when present, otherwise the endpoint chosen by the global endpoint manager
 * (the first write endpoint for media requests).
 *
 * @throws URISyntaxException if the assembled URI is malformed
 */
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
    URI rootUri = request.getEndpointOverride();
    if (rootUri == null) {
        rootUri = request.getIsMedia()
            ? this.globalEndpointManager.getWriteEndpoints().get(0)
            : this.globalEndpointManager.resolveServiceEndpoint(request);
    }

    String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
    // DatabaseAccount requests always target the service root.
    if (ResourceType.DatabaseAccount.equals(request.getResourceType())) {
        path = StringUtils.EMPTY;
    }

    return new URI("https",
        null,
        rootUri.getHost(),
        rootUri.getPort(),
        ensureSlashPrefixed(path),
        null,
        null);
}
/** Returns {@code path} guaranteed to start with '/'; a null path passes through unchanged. */
private String ensureSlashPrefixed(String path) {
    if (path == null) {
        return null;
    }
    return path.startsWith("/") ? path : "/" + path;
}
/**
 * Transforms the Reactor Netty HTTP response Mono into a
 * RxDocumentServiceResponse Mono, recording request timelines/diagnostics and
 * translating failures into CosmosExceptions with gateway sub-status codes.
 *
 * The HTTP invocation only happens once the returned Mono is subscribed.
 *
 * @param httpResponseMono the pending HTTP response
 * @param request the originating request, used for diagnostics and error context
 * @return {@link Mono} emitting the service response or a CosmosException
 */
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
                                                                  RxDocumentServiceRequest request) {
    return httpResponseMono.flatMap(httpResponse -> {
        HttpHeaders httpResponseHeaders = httpResponse.headers();
        int httpResponseStatus = httpResponse.statusCode();

        // Normalize an absent body to an empty byte array so downstream
        // handling never sees an empty Mono.
        Mono<byte[]> contentObservable = httpResponse
            .bodyAsByteArray()
            .switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));

        return contentObservable
            .map(content -> {
                ReactorNettyRequestRecord reactorNettyRequestRecord =
                    httpResponse.request().reactorNettyRequestRecord();
                if (reactorNettyRequestRecord != null) {
                    reactorNettyRequestRecord.setTimeCompleted(Instant.now());
                    BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(
                        request.requestContext.cosmosDiagnostics,
                        reactorNettyRequestRecord.takeTimelineSnapshot());
                }

                // Throws a CosmosException for gateway error status codes.
                validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);

                StoreResponse rsp = new StoreResponse(httpResponseStatus,
                    HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
                    content);
                // BUGFIX: the original called takeTimelineSnapshot() here without a
                // null check, NPE-ing whenever reactorNettyRequestRecord was null —
                // the guard above shows null is a possible state.
                if (reactorNettyRequestRecord != null) {
                    DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
                }
                if (request.requestContext.cosmosDiagnostics != null) {
                    BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
                    DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
                }
                return rsp;
            })
            .single();

    }).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
        .onErrorResume(throwable -> {
            // Reactor may wrap the real failure; unwrap before classifying it.
            Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
            if (!(unwrappedException instanceof Exception)) {
                // Non-Exception Throwables are propagated as-is.
                logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
                return Mono.error(unwrappedException);
            }

            Exception exception = (Exception) unwrappedException;
            CosmosException dce;
            if (!(exception instanceof CosmosException)) {
                // Wrap non-Cosmos failures into a CosmosException with status code 0.
                logger.error("Network failure", exception);
                dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
                BridgeInternal.setRequestHeaders(dce, request.getHeaders());
            } else {
                dce = (CosmosException) exception;
            }

            // Tag network failures so retry policies can distinguish read
            // timeouts from endpoint unavailability.
            if (WebExceptionUtility.isNetworkFailure(dce)) {
                if (WebExceptionUtility.isReadTimeoutException(dce)) {
                    BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
                } else {
                    BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
                }
            }

            if (request.requestContext.cosmosDiagnostics != null) {
                BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
                BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
            }

            return Mono.error(dce);
        });
}
/**
 * Throws a CosmosException when the HTTP status code indicates a gateway
 * error; otherwise a no-op.
 *
 * @param request the originating request, used for error context
 * @param status the HTTP status of the response
 * @param headers response headers, propagated into the exception
 * @param bodyAsBytes raw response body; parsed as a CosmosError when non-empty
 * @throws CosmosException when {@code status} is at or above the gateway error threshold
 */
private void validateOrThrow(RxDocumentServiceRequest request,
                             HttpResponseStatus status,
                             HttpHeaders headers,
                             byte[] bodyAsBytes) {

    int statusCode = status.code();

    if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
        String statusCodeString = status.reasonPhrase() != null
            ? status.reasonPhrase().replace(" ", "")
            : "";

        // BUGFIX: decode with an explicit charset. 'new String(bytes)' used the
        // platform default charset, which can mangle non-ASCII error payloads
        // on non-UTF-8 platforms.
        String body = bodyAsBytes != null
            ? new String(bodyAsBytes, java.nio.charset.StandardCharsets.UTF_8)
            : null;
        CosmosError cosmosError;
        cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
        // Re-wrap to append the status code to the error message.
        cosmosError = new CosmosError(statusCodeString,
            String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
            cosmosError.getPartitionedQueryExecutionInfo());

        CosmosException dce = BridgeInternal.createCosmosException(
            request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
        BridgeInternal.setRequestHeaders(dce, request.getHeaders());
        throw dce;
    }
}
/**
 * Dispatches the request to the HTTP handler matching its operation type.
 *
 * @throws IllegalStateException for unrecognized operation types
 */
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
    switch (request.getOperationType()) {
        case Create:
        case Batch:
            // Batch shares the create path (POST).
            return this.create(request);
        case Patch:
            return this.patch(request);
        case Upsert:
            return this.upsert(request);
        case Delete:
            return this.delete(request);
        case ExecuteJavaScript:
            return this.execute(request);
        case Read:
            return this.read(request);
        case ReadFeed:
            return this.readFeed(request);
        case Replace:
            return this.replace(request);
        case SqlQuery:
        case Query:
        case QueryPlan:
            return this.query(request);
        default:
            throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
    }
}
/**
 * Invokes the request with backoff retry for transient web exceptions;
 * each retry re-runs the full dispatch.
 */
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
    return BackoffRetryUtility.executeRetry(
        () -> this.invokeAsyncInternal(request).single(),
        new WebExceptionRetryPolicy());
}
/**
 * Sends the request through the gateway: applies the session token before the
 * call and captures the session token from the response (or from eligible
 * failures) afterwards.
 */
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
    this.applySessionToken(request);

    Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);

    return responseObs.onErrorResume(
        e -> {
            CosmosException dce = Utils.as(e, CosmosException.class);

            if (dce == null) {
                // Non-Cosmos failures are propagated untouched.
                logger.error("unexpected failure {}", e.getMessage(), e);
                return Mono.error(e);
            }

            // Even a failed response can carry a usable session token: capture
            // it for non-master resources on 412 (precondition failed),
            // 409 (conflict), or 404 (not found) — unless the 404 carries the
            // read-session-not-available sub-status.
            if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
                (dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
                    dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                    (
                        dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
                            !Exceptions.isSubStatusCode(dce,
                                HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
                this.captureSessionToken(request, dce.getResponseHeaders());
            }

            return Mono.error(dce);
        }
    ).map(response ->
        {
            // Successful responses always update the session container.
            this.captureSessionToken(request, response.getResponseHeaders());
            return response;
        }
    );
}
/**
 * Records the session token from the response, except on collection deletes,
 * where the collection's cached tokens are cleared instead.
 */
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
    boolean isCollectionDelete = request.getResourceType() == ResourceType.DocumentCollection
        && request.getOperationType() == OperationType.Delete;

    if (!isCollectionDelete) {
        this.sessionContainer.setSessionToken(request, responseHeaders);
        return;
    }

    // For name-based requests the resource id comes from the OWNER_ID response
    // header; otherwise it is already on the request.
    String resourceId = request.getIsNameBased()
        ? responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID)
        : request.getResourceId();
    this.sessionContainer.clearTokenByResourceId(resourceId);
}
/**
 * An operation is treated as a master operation when it targets a master
 * resource, is a non-execute stored-procedure operation, or is a query-plan
 * request. Master operations never carry session tokens.
 */
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
    return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
        isStoredProcedureMasterOperation(resourceType, operationType) ||
        operationType == OperationType.QueryPlan;
}

/** Stored-procedure operations other than ExecuteJavaScript count as master operations. */
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
    return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} |
Added tests, a little bit different compared to use ReflectionUtils.TransportClient | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} |
awesome test for gateway @xinlian12 . later we should add a similar test for direct mode. For that you will need something similar to ReflectionUtils.TransportClient. As this PR scope is only gateway, doesn't have to happen in this PR. please track similar test for direct for future test improvement. | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
* Given the request it creates an flux which upon subscription issues HTTP call and emits one RxDocumentServiceResponse.
*
* @param request
* @param method
* @return Flux<RxDocumentServiceResponse>
*/
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
return Mono.error(e);
}
}
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
if (headers != null) {
for (Entry<String, String> entry : headers.entrySet()) {
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
}
return httpHeaders;
}
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
if (path.startsWith("/")) {
return path;
}
return "/" + path;
}
/**
* Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
*
*
* Once the customer code subscribes to the observable returned by the CRUD APIs,
* the subscription goes up till it reaches the source reactor netty's observable, and at that point the HTTP invocation will be made.
*
* @param httpResponseMono
* @param request
* @return {@link Mono}
*/
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
return ReplicatedResourceClientUtils.isMasterResource(resourceType) ||
isStoredProcedureMasterOperation(resourceType, operationType) ||
operationType == OperationType.QueryPlan;
}
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
return resourceType == ResourceType.StoredProcedure && operationType != OperationType.ExecuteJavaScript;
}
} |
Should these be fit into one single line? | public boolean addTrustedIssuer(String... issuers) {
if (ArrayUtils.isEmpty(issuers)) {
return false;
}
return trustedIssuers
.addAll(Arrays.stream(issuers).collect(Collectors.toSet()));
} | .addAll(Arrays.stream(issuers).collect(Collectors.toSet())); | public boolean addTrustedIssuer(String... issuers) {
if (ArrayUtils.isEmpty(issuers)) {
return false;
}
return trustedIssuers
.addAll(Arrays.stream(issuers).collect(Collectors.toSet()));
} | class AADTrustedIssuerRepository {
private static final Logger LOGGER = LoggerFactory.getLogger(AADTrustedIssuerRepository.class);
private static final String LOGIN_MICROSOFT_ONLINE_ISSUER = "https:
private static final String STS_WINDOWS_ISSUER = "https:
private static final String STS_CHINA_CLOUD_API_ISSUER = "https:
private static final String PATH_DELIMITER = "/";
private static final String PATH_DELIMITER_V2 = "/v2.0";
private final List<String> trustedIssuers = new ArrayList<>();
private final String tenantId;
public AADTrustedIssuerRepository(String tenantId) {
this.tenantId = tenantId;
trustedIssuers.addAll(buildAADIssuers(PATH_DELIMITER));
trustedIssuers.addAll(buildAADIssuers(PATH_DELIMITER_V2));
}
private List<String> buildAADIssuers(String delimiter) {
return Arrays.asList(LOGIN_MICROSOFT_ONLINE_ISSUER, STS_WINDOWS_ISSUER, STS_CHINA_CLOUD_API_ISSUER)
.stream()
.map(s -> s + tenantId + delimiter)
.collect(Collectors.toList());
}
public void addB2CIssuer(String baseUri) {
Assert.notNull(baseUri, "tenantName cannot be null.");
trustedIssuers.add(String.format(resolveBaseUri(baseUri) + "/%s/v2.0/", tenantId));
}
/**
* Only the V2 version of Access Token is supported when using Azure AD B2C user flows.
*
* @param baseUri The base uri is the domain part of the endpoint.
* @param userFlows The all user flows which is created under b2c tenant.
*/
public void addB2CUserFlowIssuers(String baseUri, UserFlows userFlows) {
Assert.notNull(userFlows, "userFlows cannot be null.");
String resolvedBaseUri = resolveBaseUri(baseUri);
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getSignUpOrSignIn());
if (!StringUtils.isEmpty(userFlows.getProfileEdit())) {
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getProfileEdit());
}
if (!StringUtils.isEmpty(userFlows.getPasswordReset())) {
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getPasswordReset());
}
}
private void creatB2CUserFlowIssuer(String resolveBaseUri, String userFlowName) {
trustedIssuers.add(String.format(resolveBaseUri + "/tfp/%s/%s/v2.0/", tenantId, userFlowName));
}
public List<String> getTrustedIssuers() {
return Collections.unmodifiableList(trustedIssuers);
}
/**
* Resolve the base uri to get scheme and host.
* @param baseUri Base uri in the configuration file.
*/
private String resolveBaseUri(String baseUri) {
Assert.notNull(baseUri, "baseUri cannot be null");
try {
URI uri = new URI(baseUri);
return uri.getScheme() + ":
} catch (URISyntaxException e) {
LOGGER.error("Resolve the base uri exception.");
throw new RuntimeException("Resolve the base uri:'" + baseUri + "' exception.");
}
}
} | class AADTrustedIssuerRepository {
private static final Logger LOGGER = LoggerFactory.getLogger(AADTrustedIssuerRepository.class);
private static final String LOGIN_MICROSOFT_ONLINE_ISSUER = "https:
private static final String STS_WINDOWS_ISSUER = "https:
private static final String STS_CHINA_CLOUD_API_ISSUER = "https:
private static final String PATH_DELIMITER = "/";
private static final String PATH_DELIMITER_V2 = "/v2.0";
private final List<String> trustedIssuers = new ArrayList<>();
private final String tenantId;
public AADTrustedIssuerRepository(String tenantId) {
this.tenantId = tenantId;
trustedIssuers.addAll(buildAADIssuers(PATH_DELIMITER));
trustedIssuers.addAll(buildAADIssuers(PATH_DELIMITER_V2));
}
private List<String> buildAADIssuers(String delimiter) {
return Arrays.asList(LOGIN_MICROSOFT_ONLINE_ISSUER, STS_WINDOWS_ISSUER, STS_CHINA_CLOUD_API_ISSUER)
.stream()
.map(s -> s + tenantId + delimiter)
.collect(Collectors.toList());
}
public void addB2CIssuer(String baseUri) {
Assert.notNull(baseUri, "tenantName cannot be null.");
trustedIssuers.add(String.format(resolveBaseUri(baseUri) + "/%s/v2.0/", tenantId));
}
/**
* Only the V2 version of Access Token is supported when using Azure AD B2C user flows.
*
* @param baseUri The base uri is the domain part of the endpoint.
* @param userFlows The all user flows which is created under b2c tenant.
*/
public void addB2CUserFlowIssuers(String baseUri, UserFlows userFlows) {
Assert.notNull(userFlows, "userFlows cannot be null.");
String resolvedBaseUri = resolveBaseUri(baseUri);
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getSignUpOrSignIn());
if (!StringUtils.isEmpty(userFlows.getProfileEdit())) {
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getProfileEdit());
}
if (!StringUtils.isEmpty(userFlows.getPasswordReset())) {
creatB2CUserFlowIssuer(resolvedBaseUri, userFlows.getPasswordReset());
}
}
private void creatB2CUserFlowIssuer(String resolveBaseUri, String userFlowName) {
trustedIssuers.add(String.format(resolveBaseUri + "/tfp/%s/%s/v2.0/", tenantId, userFlowName));
}
public List<String> getTrustedIssuers() {
return Collections.unmodifiableList(trustedIssuers);
}
/**
* Resolve the base uri to get scheme and host.
* @param baseUri Base uri in the configuration file.
*/
private String resolveBaseUri(String baseUri) {
Assert.notNull(baseUri, "baseUri cannot be null");
try {
URI uri = new URI(baseUri);
return uri.getScheme() + ":
} catch (URISyntaxException e) {
LOGGER.error("Resolve the base uri exception.");
throw new RuntimeException("Resolve the base uri:'" + baseUri + "' exception.");
}
}
} |
Should we gear samples towards using connection strings or AAD? | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder().connectionString(connectionString).buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
ConfigurationSetting updateSetting = client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue");
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, watchingSettings, Arrays.asList(updateSetting));
watchingSettings.forEach(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
});
} | String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}"; | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
ConfigurationSetting updatedSetting = client.setConfigurationSetting(
prodDBConnectionKey, prodDBConnectionLabel, "updatedProdValue");
System.out.println("Updated settings:");
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(), updatedSetting.getETag());
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
System.out.println("Deleting settings:");
watchingSettings.forEach(setting -> {
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
System.out.printf("\tkey: %s, value: %s.%n", setting.getKey(), setting.getValue());
});
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel());
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
ConfigurationSetting retrievedSetting = client.getConfigurationSetting(setting.getKey(),
setting.getLabel());
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, "
+ "preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag, latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
return true;
}
return false;
})
.collect(Collectors.toList());
}
} |
I would have thought refresh to be a method which implicitly updates all configurations that are out of date and not just check if any are | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder().connectionString(connectionString).buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
ConfigurationSetting updateSetting = client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue");
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, watchingSettings, Arrays.asList(updateSetting));
watchingSettings.forEach(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
});
} | refresh(client, watchingSettings, Arrays.asList(updateSetting)); | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
ConfigurationSetting updatedSetting = client.setConfigurationSetting(
prodDBConnectionKey, prodDBConnectionLabel, "updatedProdValue");
System.out.println("Updated settings:");
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(), updatedSetting.getETag());
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
System.out.println("Deleting settings:");
watchingSettings.forEach(setting -> {
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
System.out.printf("\tkey: %s, value: %s.%n", setting.getKey(), setting.getValue());
});
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel());
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
ConfigurationSetting retrievedSetting = client.getConfigurationSetting(setting.getKey(),
setting.getLabel());
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, "
+ "preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag, latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
return true;
}
return false;
})
.collect(Collectors.toList());
}
} |
We shouldn't use blocking in our asynchronous examples, blocking is generally not preferred and can lead to runtime exceptions if it is used in the wrong spot | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
final ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> settings = Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList()).block();
ConfigurationSetting updateSetting =
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue").block();
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, settings, Arrays.asList(updateSetting));
final Stream<ConfigurationSetting> stream = settings == null ? Stream.empty() : settings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | .then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList()).block(); | public static void main(String[] args) throws InterruptedException {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
String updatedProdDBConnectionValue = "updateProdValue";
List<ConfigurationSetting> watchingSettings = new ArrayList<>();
Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList())
.subscribe(
settings -> watchingSettings.addAll(settings),
error -> System.err.printf("There was an error while adding the settings: %s.%n", error),
() -> System.out.println("Add settings completed.")
);
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tWatching key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue)
.subscribe(
updatedSetting -> {
System.out.println("Updated settings:");
System.out.printf("\tUpdated key=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(),
updatedSetting.getETag());
},
error -> System.err.printf("There was an error while updating the setting: %s.%n", error),
() -> System.out.printf("Update setting completed, key=%s, label=%s, value=%s.%n",
prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue));
TimeUnit.MILLISECONDS.sleep(1000);
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tRefreshed key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Deleting settings:");
Stream<ConfigurationSetting> stream = watchingSettings == null ? Stream.empty() : watchingSettings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("\tDeleting key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationAsyncClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel()).block();
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationAsyncClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
final boolean[] isUpdated = new boolean[1];
String key = setting.getKey();
String label = setting.getLabel();
client.getConfigurationSetting(key, label)
.subscribe(
retrievedSetting -> {
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is "
+ "updated, preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag,
latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
isUpdated[0] = true;
}
},
error -> System.err.printf("There was an error while retrieving the setting: %s.%n",
error),
() -> System.out.printf("Retrieve setting completed, key=%s, label=%s.%n", key, label));
try {
TimeUnit.MILLISECONDS.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return isUpdated[0];
})
.collect(Collectors.toList());
}
} |
since other samples already using connection string. We can follow the same pattern. ConnectionString is easier to use when people trying to run samples. | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder().connectionString(connectionString).buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
ConfigurationSetting updateSetting = client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue");
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, watchingSettings, Arrays.asList(updateSetting));
watchingSettings.forEach(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
});
} | String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}"; | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
ConfigurationSetting updatedSetting = client.setConfigurationSetting(
prodDBConnectionKey, prodDBConnectionLabel, "updatedProdValue");
System.out.println("Updated settings:");
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(), updatedSetting.getETag());
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
System.out.println("Deleting settings:");
watchingSettings.forEach(setting -> {
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
System.out.printf("\tkey: %s, value: %s.%n", setting.getKey(), setting.getValue());
});
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel());
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
ConfigurationSetting retrievedSetting = client.getConfigurationSetting(setting.getKey(),
setting.getLabel());
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, "
+ "preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag, latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
return true;
}
return false;
})
.collect(Collectors.toList());
}
} |
updated | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
final ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> settings = Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList()).block();
ConfigurationSetting updateSetting =
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue").block();
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, settings, Arrays.asList(updateSetting));
final Stream<ConfigurationSetting> stream = settings == null ? Stream.empty() : settings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | .then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList()).block(); | public static void main(String[] args) throws InterruptedException {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
String updatedProdDBConnectionValue = "updateProdValue";
List<ConfigurationSetting> watchingSettings = new ArrayList<>();
Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList())
.subscribe(
settings -> watchingSettings.addAll(settings),
error -> System.err.printf("There was an error while adding the settings: %s.%n", error),
() -> System.out.println("Add settings completed.")
);
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tWatching key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue)
.subscribe(
updatedSetting -> {
System.out.println("Updated settings:");
System.out.printf("\tUpdated key=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(),
updatedSetting.getETag());
},
error -> System.err.printf("There was an error while updating the setting: %s.%n", error),
() -> System.out.printf("Update setting completed, key=%s, label=%s, value=%s.%n",
prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue));
TimeUnit.MILLISECONDS.sleep(1000);
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tRefreshed key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Deleting settings:");
Stream<ConfigurationSetting> stream = watchingSettings == null ? Stream.empty() : watchingSettings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("\tDeleting key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationAsyncClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel()).block();
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationAsyncClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
final boolean[] isUpdated = new boolean[1];
String key = setting.getKey();
String label = setting.getLabel();
client.getConfigurationSetting(key, label)
.subscribe(
retrievedSetting -> {
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is "
+ "updated, preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag,
latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
isUpdated[0] = true;
}
},
error -> System.err.printf("There was an error while retrieving the setting: %s.%n",
error),
() -> System.out.printf("Retrieve setting completed, key=%s, label=%s.%n", key, label));
try {
TimeUnit.MILLISECONDS.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return isUpdated[0];
})
.collect(Collectors.toList());
}
} |
updated | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder().connectionString(connectionString).buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
ConfigurationSetting updateSetting = client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "updateProdValue");
System.out.printf("Updated setting's key: %s, value: %s, ETag: %s.%n",
updateSetting.getKey(), updateSetting.getValue(), updateSetting.getETag());
refresh(client, watchingSettings, Arrays.asList(updateSetting));
watchingSettings.forEach(setting -> {
System.out.printf("Deleting Setting's key: %s, value: %s.%n", setting.getKey(), setting.getValue());
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
});
} | refresh(client, watchingSettings, Arrays.asList(updateSetting)); | public static void main(String[] args) {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
List<ConfigurationSetting> watchingSettings = Arrays.asList(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue")
);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
ConfigurationSetting updatedSetting = client.setConfigurationSetting(
prodDBConnectionKey, prodDBConnectionLabel, "updatedProdValue");
System.out.println("Updated settings:");
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(), updatedSetting.getETag());
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tkey=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
System.out.println("Deleting settings:");
watchingSettings.forEach(setting -> {
client.deleteConfigurationSetting(setting.getKey(), setting.getLabel());
System.out.printf("\tkey: %s, value: %s.%n", setting.getKey(), setting.getValue());
});
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
private static boolean refresh(ConfigurationClient client, List<ConfigurationSetting> watchSettings, List<ConfigurationSetting> latestSettings) {
for (ConfigurationSetting watchSetting : watchSettings) {
ConfigurationSetting latestSetting = client.getConfigurationSetting(watchSetting.getKey(), watchSetting.getLabel());
String latestETag = latestSetting.getETag();
String previousETag = watchSetting.getETag();
if (!latestETag.equals(previousETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, preview ETag value [%s] not " +
"equals to current value [%s], will send refresh event.%n",
watchSetting.getKey(), watchSetting.getLabel(), previousETag, latestETag);
return true;
}
}
return false;
}
} | class WatchFeature {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
ConfigurationSetting retrievedSetting = client.getConfigurationSetting(setting.getKey(),
setting.getLabel());
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is updated, "
+ "preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag, latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
return true;
}
return false;
})
.collect(Collectors.toList());
}
} |
nit: `Mono.when` could be used here instead | public static void main(String[] args) throws InterruptedException {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
String updatedProdDBConnectionValue = "updateProdValue";
List<ConfigurationSetting> watchingSettings = new ArrayList<>();
Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList())
.subscribe(
settings -> watchingSettings.addAll(settings),
error -> System.err.printf("There was an error while adding the settings: %s.%n", error),
() -> System.out.println("Add settings completed.")
);
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tWatching key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue)
.subscribe(
updatedSetting -> {
System.out.println("Updated settings:");
System.out.printf("\tUpdated key=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(),
updatedSetting.getETag());
},
error -> System.err.printf("There was an error while updating the setting: %s.%n", error),
() -> System.out.printf("Update setting completed, key=%s, label=%s, value=%s.%n",
prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue));
TimeUnit.MILLISECONDS.sleep(1000);
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tRefreshed key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Deleting settings:");
Stream<ConfigurationSetting> stream = watchingSettings == null ? Stream.empty() : watchingSettings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("\tDeleting key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | Flux.concat( | public static void main(String[] args) throws InterruptedException {
String connectionString = "endpoint={endpoint_value};id={id_value};secret={secret_value}";
ConfigurationAsyncClient client = new ConfigurationClientBuilder()
.connectionString(connectionString)
.buildAsyncClient();
String prodDBConnectionKey = "prodDBConnection";
String prodDBConnectionLabel = "prodLabel";
String updatedProdDBConnectionValue = "updateProdValue";
List<ConfigurationSetting> watchingSettings = new ArrayList<>();
Flux.concat(
client.addConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, "prodValue"),
client.addConfigurationSetting("stageDBConnection", "stageLabel", "stageValue"))
.then(client.listConfigurationSettings(new SettingSelector().setKeyFilter("*")).collectList())
.subscribe(
settings -> watchingSettings.addAll(settings),
error -> System.err.printf("There was an error while adding the settings: %s.%n", error),
() -> System.out.println("Add settings completed.")
);
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Watching settings:");
for (ConfigurationSetting setting : watchingSettings) {
System.out.printf("\tWatching key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
client.setConfigurationSetting(prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue)
.subscribe(
updatedSetting -> {
System.out.println("Updated settings:");
System.out.printf("\tUpdated key=%s, label=%s, value=%s, ETag=%s.%n",
updatedSetting.getKey(), updatedSetting.getLabel(), updatedSetting.getValue(),
updatedSetting.getETag());
},
error -> System.err.printf("There was an error while updating the setting: %s.%n", error),
() -> System.out.printf("Update setting completed, key=%s, label=%s, value=%s.%n",
prodDBConnectionKey, prodDBConnectionLabel, updatedProdDBConnectionValue));
TimeUnit.MILLISECONDS.sleep(1000);
List<ConfigurationSetting> refreshedSettings = refresh(client, watchingSettings);
System.out.println("Refreshed settings:");
for (ConfigurationSetting setting : refreshedSettings) {
System.out.printf("\tRefreshed key=%s, label=%s, value=%s, ETag=%s.%n",
setting.getKey(), setting.getLabel(), setting.getValue(), setting.getETag());
}
TimeUnit.MILLISECONDS.sleep(1000);
System.out.println("Deleting settings:");
Stream<ConfigurationSetting> stream = watchingSettings == null ? Stream.empty() : watchingSettings.stream();
Flux.merge(stream.map(setting -> {
System.out.printf("\tDeleting key: %s, value: %s.%n", setting.getKey(), setting.getValue());
return client.deleteConfigurationSettingWithResponse(setting, false);
}).collect(Collectors.toList())).blockLast();
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationAsyncClient client,
List<ConfigurationSetting> watchSettings) {
return watchSettings
.stream()
.filter(setting -> {
final boolean[] isUpdated = new boolean[1];
String key = setting.getKey();
String label = setting.getLabel();
client.getConfigurationSetting(key, label)
.subscribe(
retrievedSetting -> {
String latestETag = retrievedSetting.getETag();
String watchingETag = setting.getETag();
if (!latestETag.equals(watchingETag)) {
System.out.printf(
"Some keys in watching key store matching the key [%s] and label [%s] is "
+ "updated, preview ETag value [%s] not equals to current value [%s].%n",
retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag,
latestETag);
setting.setETag(latestETag).setValue(retrievedSetting.getValue());
isUpdated[0] = true;
}
},
error -> System.err.printf("There was an error while retrieving the setting: %s.%n",
error),
() -> System.out.printf("Retrieve setting completed, key=%s, label=%s.%n", key, label));
try {
TimeUnit.MILLISECONDS.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return isUpdated[0];
})
.collect(Collectors.toList());
}
} | class WatchFeatureAsync {
/**
* Runs the sample algorithm and demonstrates how to read configuration setting revision history.
*
* @param args Unused. Arguments to the program.
*/
/**
* A refresh method that runs every day to update settings and returns a updated settings.
*
* @param client a configuration client.
* @param watchSettings a list of settings in the watching store.
*
* @return a list of updated settings that doesn't match previous ETag value.
*/
private static List<ConfigurationSetting> refresh(ConfigurationAsyncClient client,
    List<ConfigurationSetting> watchSettings) {
    // Keep only the settings whose latest ETag in the store differs from the watched copy.
    return watchSettings
        .stream()
        .filter(setting -> {
            // One-element array so the async lambda below can mutate an effectively-final flag.
            final boolean[] isUpdated = new boolean[1];
            String key = setting.getKey();
            String label = setting.getLabel();
            client.getConfigurationSetting(key, label)
                .subscribe(
                    retrievedSetting -> {
                        String latestETag = retrievedSetting.getETag();
                        String watchingETag = setting.getETag();
                        // A changed ETag means the setting was updated server-side: refresh the
                        // local copy in place and flag it for inclusion in the result.
                        // NOTE(review): throws NPE if latestETag is null — confirm the service
                        // always returns an ETag for an existing setting.
                        if (!latestETag.equals(watchingETag)) {
                            System.out.printf(
                                "Some keys in watching key store matching the key [%s] and label [%s] is "
                                    + "updated, preview ETag value [%s] not equals to current value [%s].%n",
                                retrievedSetting.getKey(), retrievedSetting.getLabel(), watchingETag,
                                latestETag);
                            setting.setETag(latestETag).setValue(retrievedSetting.getValue());
                            isUpdated[0] = true;
                        }
                    },
                    error -> System.err.printf("There was an error while retrieving the setting: %s.%n",
                        error),
                    () -> System.out.printf("Retrieve setting completed, key=%s, label=%s.%n", key, label));
            // Best-effort wait for the async retrieval above before reading the flag.
            // NOTE(review): this is a race — the subscriber may not have fired within 1 second.
            try {
                TimeUnit.MILLISECONDS.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return isUpdated[0];
        })
        .collect(Collectors.toList());
}
} |
nit: can we shorten this by adding another import? | public PhoneNumberClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
// NOTE(review): add an import for CommunicationConnectionString so this fully
// qualified name can be shortened (reviewer nit) — requires touching the import block.
com.azure.communication.common.implementation.CommunicationConnectionString connectionStringObject = new com.azure.communication.common.implementation.CommunicationConnectionString(connectionString);
// The connection string carries both the service endpoint URL and the access key.
String endpoint = connectionStringObject.getEndpoint();
String accessKey = connectionStringObject.getAccessKey();
// Both setters return this builder, so chaining is safe.
this
    .endpoint(endpoint)
    .accessKey(accessKey);
return this;
} | com.azure.communication.common.implementation.CommunicationConnectionString connectionStringObject = new com.azure.communication.common.implementation.CommunicationConnectionString(connectionString); | public PhoneNumberClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
// Parse the connection string once, then apply its two components to this builder.
CommunicationConnectionString parsedConnectionString =
    new CommunicationConnectionString(connectionString);
this.endpoint(parsedConnectionString.getEndpoint());
this.accessKey(parsedConnectionString.getAccessKey());
return this;
} | class PhoneNumberClientBuilder {
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure-communication-administration.properties");
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final ClientLogger logger = new ClientLogger(PhoneNumberClientBuilder.class);
private PhoneNumberServiceVersion version;
private String endpoint;
private HttpPipeline pipeline;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private CommunicationClientCredential accessKeyCredential;
private TokenCredential tokenCredential;
private Configuration configuration;
private final List<HttpPipelinePolicy> additionalPolicies = new ArrayList<>();
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code endpoint} is {@code null}.
*/
public PhoneNumberClientBuilder endpoint(String endpoint) {
this.endpoint = Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
return this;
}
/**
* Sets the HTTP pipeline to use for the service client
* <p>
* If {@code pipeline} is set, all other settings aside from
* {@link PhoneNumberClientBuilder
*
* @param pipeline HttpPipeline to use
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = pipeline;
return this;
}
/**
* Set HttpClient to use
*
* @param httpClient HttpClient to use
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code httpClient} is {@code null}.
*/
public PhoneNumberClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = httpClient;
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link HttpLogDetailLevel
*
* @param httpLogOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder httpLogOptions(HttpLogOptions httpLogOptions) {
this.httpLogOptions = httpLogOptions;
return this;
}
/**
* Set CommunicationClientCredential for authorization
*
* @param accessKey access key for initalizing CommunicationClientCredential
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code accessKey} is {@code null}.
*/
public PhoneNumberClientBuilder accessKey(String accessKey) {
Objects.requireNonNull(accessKey, "'accessKey' cannot be null.");
this.accessKeyCredential = new CommunicationClientCredential(accessKey);
return this;
}
/**
* Sets the {@link TokenCredential} used to authenticate HTTP requests.
*
* @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests.
* @return The updated {@link CommunicationIdentityClientBuilder} object.
* @throws NullPointerException If {@code tokenCredential} is null.
*/
public PhoneNumberClientBuilder credential(TokenCredential tokenCredential) {
this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
return this;
}
/**
* Set the endpoint and CommunicationClientCredential for authorization
*
* @param connectionString connection string for setting endpoint and initalizing CommunicationClientCredential
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code connectionString} is {@code null}.
*/
/**
* Sets the configuration object used to retrieve environment configuration values during building of the client.
*
* @param configuration Configuration store used to retrieve environment configurations.
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after required policies.
*
* @param policy The retry policy for service requests.
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code policy} is {@code null}.
*/
public PhoneNumberClientBuilder addPolicy(HttpPipelinePolicy policy) {
this.additionalPolicies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
return this;
}
/**
* Sets the {@link PhoneNumberServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link PhoneNumberServiceVersion} of the service to be used when making requests.
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder serviceVersion(PhoneNumberServiceVersion version) {
this.version = version;
return this;
}
/**
* Create synchronous client applying CommunicationClientCredentialPolicy,
* UserAgentPolicy, RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return {@link PhoneNumberClient} instance
*/
public PhoneNumberClient buildClient() {
return new PhoneNumberClient(this.buildAsyncClient());
}
/**
* Create asynchronous client applying CommunicationClientCredentialPolicy,
* UserAgentPolicy, RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return {@link PhoneNumberAsyncClient} instance
*/
public PhoneNumberAsyncClient buildAsyncClient() {
    // Validate endpoint/httpClient up front so misconfiguration fails at build time.
    this.validateRequiredFields();
    if (this.version != null) {
        // Separator fixed: previously logged e.g. "...service version1.0".
        logger.info("Build client for service version " + this.version.getVersion());
    }
    return this.createPhoneNumberAsyncClient(this.createPhoneNumberAdminClient());
}
PhoneNumberAsyncClient createPhoneNumberAsyncClient(PhoneNumberAdminClientImpl phoneNumberAdminClient) {
return new PhoneNumberAsyncClient(phoneNumberAdminClient);
}
HttpPipelinePolicy createAuthenticationPolicy() {
if (this.tokenCredential != null && this.accessKeyCredential != null) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Both 'credential' and 'accessKey' are set. Just one may be used."));
}
if (this.tokenCredential != null) {
return new BearerTokenAuthenticationPolicy(
this.tokenCredential, "https:
} else if (this.accessKeyCredential != null) {
return new HmacAuthenticationPolicy(this.accessKeyCredential);
} else {
throw logger.logExceptionAsError(
new NullPointerException("Missing credential information while building a client."));
}
}
UserAgentPolicy createUserAgentPolicy(
String applicationId, String sdkName, String sdkVersion, Configuration configuration) {
return new UserAgentPolicy(applicationId, sdkName, sdkVersion, configuration);
}
RetryPolicy createRetryPolicy() {
return new RetryPolicy();
}
CookiePolicy createCookiePolicy() {
return new CookiePolicy();
}
HttpLoggingPolicy createHttpLoggingPolicy(HttpLogOptions httpLogOptions) {
return new HttpLoggingPolicy(httpLogOptions);
}
HttpLogOptions createDefaultHttpLogOptions() {
return new HttpLogOptions();
}
// Fail fast with descriptive messages when a required builder field is missing;
// the bare requireNonNull calls previously threw message-less NPEs, inconsistent
// with the messages used by the setters in this class.
private void validateRequiredFields() {
    Objects.requireNonNull(this.endpoint, "'endpoint' cannot be null.");
    // A user-supplied pipeline already carries its own HTTP client.
    if (this.pipeline == null) {
        Objects.requireNonNull(this.httpClient, "'httpClient' cannot be null when no pipeline is set.");
    }
}
private PhoneNumberAdminClientImpl createPhoneNumberAdminClient() {
PhoneNumberAdminClientImplBuilder clientBuilder = new PhoneNumberAdminClientImplBuilder();
return clientBuilder
.endpoint(this.endpoint)
.pipeline(this.createHttpPipeline())
.buildClient();
}
/**
 * Builds the HTTP pipeline, or returns the user-supplied one unchanged.
 * Policy order: authentication, user agent, retry, cookie, caller-supplied
 * policies, then logging (so the logging policy observes the final request).
 */
private HttpPipeline createHttpPipeline() {
    if (this.pipeline != null) {
        return this.pipeline;
    }
    List<HttpPipelinePolicy> policyList = new ArrayList<>();
    policyList.add(this.createAuthenticationPolicy());
    policyList.add(this.createUserAgentPolicy(
        this.getHttpLogOptions().getApplicationId(),
        PROPERTIES.get(SDK_NAME),
        PROPERTIES.get(SDK_VERSION),
        this.configuration
    ));
    policyList.add(this.createRetryPolicy());
    policyList.add(this.createCookiePolicy());
    // isEmpty() states intent more directly than size() > 0.
    if (!this.additionalPolicies.isEmpty()) {
        policyList.addAll(this.additionalPolicies);
    }
    policyList.add(this.createHttpLoggingPolicy(this.getHttpLogOptions()));
    return new HttpPipelineBuilder()
        .policies(policyList.toArray(new HttpPipelinePolicy[0]))
        .httpClient(this.httpClient)
        .build();
}
// Lazily materializes the log options, caching a default instance on first use.
private HttpLogOptions getHttpLogOptions() {
    HttpLogOptions options = this.httpLogOptions;
    if (options == null) {
        options = this.createDefaultHttpLogOptions();
        this.httpLogOptions = options;
    }
    return options;
}
} | class PhoneNumberClientBuilder {
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure-communication-administration.properties");
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final ClientLogger logger = new ClientLogger(PhoneNumberClientBuilder.class);
private PhoneNumberServiceVersion version;
private String endpoint;
private HttpPipeline pipeline;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private AzureKeyCredential azureKeyCredential;
private TokenCredential tokenCredential;
private Configuration configuration;
private final List<HttpPipelinePolicy> additionalPolicies = new ArrayList<>();
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code endpoint} is {@code null}.
*/
public PhoneNumberClientBuilder endpoint(String endpoint) {
this.endpoint = Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
return this;
}
/**
* Sets the HTTP pipeline to use for the service client
* <p>
* If {@code pipeline} is set, all other settings aside from
* {@link PhoneNumberClientBuilder
*
* @param pipeline HttpPipeline to use
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = pipeline;
return this;
}
/**
* Set HttpClient to use
*
* @param httpClient HttpClient to use
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code httpClient} is {@code null}.
*/
public PhoneNumberClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = httpClient;
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link HttpLogDetailLevel
*
* @param httpLogOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return the updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder httpLogOptions(HttpLogOptions httpLogOptions) {
this.httpLogOptions = httpLogOptions;
return this;
}
/**
* Set AzureKeyCredential for authorization
*
* @param accessKey access key for initalizing AzureKeyCredential
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code accessKey} is {@code null}.
*/
public PhoneNumberClientBuilder accessKey(String accessKey) {
Objects.requireNonNull(accessKey, "'accessKey' cannot be null.");
this.azureKeyCredential = new AzureKeyCredential(accessKey);
return this;
}
/**
* Sets the {@link TokenCredential} used to authenticate HTTP requests.
*
* @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests.
* @return The updated {@link CommunicationIdentityClientBuilder} object.
* @throws NullPointerException If {@code tokenCredential} is null.
*/
public PhoneNumberClientBuilder credential(TokenCredential tokenCredential) {
this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
return this;
}
/**
* Set the endpoint and AzureKeyCredential for authorization
*
* @param connectionString connection string for setting endpoint and initalizing AzureKeyCredential
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code connectionString} is {@code null}.
*/
/**
* Sets the configuration object used to retrieve environment configuration values during building of the client.
*
* @param configuration Configuration store used to retrieve environment configurations.
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after required policies.
*
* @param policy The retry policy for service requests.
* @return The updated {@link PhoneNumberClientBuilder} object.
* @throws NullPointerException If {@code policy} is {@code null}.
*/
public PhoneNumberClientBuilder addPolicy(HttpPipelinePolicy policy) {
this.additionalPolicies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
return this;
}
/**
* Sets the {@link PhoneNumberServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link PhoneNumberServiceVersion} of the service to be used when making requests.
* @return The updated {@link PhoneNumberClientBuilder} object.
*/
public PhoneNumberClientBuilder serviceVersion(PhoneNumberServiceVersion version) {
this.version = version;
return this;
}
/**
* Create synchronous client applying CommunicationClientCredentialPolicy,
* UserAgentPolicy, RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return {@link PhoneNumberClient} instance
*/
public PhoneNumberClient buildClient() {
return new PhoneNumberClient(this.buildAsyncClient());
}
/**
* Create asynchronous client applying CommunicationClientCredentialPolicy,
* UserAgentPolicy, RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return {@link PhoneNumberAsyncClient} instance
*/
public PhoneNumberAsyncClient buildAsyncClient() {
    // Validate endpoint/httpClient up front so misconfiguration fails at build time.
    this.validateRequiredFields();
    if (this.version != null) {
        // Separator fixed: previously logged e.g. "...service version1.0".
        logger.info("Build client for service version " + this.version.getVersion());
    }
    return this.createPhoneNumberAsyncClient(this.createPhoneNumberAdminClient());
}
PhoneNumberAsyncClient createPhoneNumberAsyncClient(PhoneNumberAdminClientImpl phoneNumberAdminClient) {
return new PhoneNumberAsyncClient(phoneNumberAdminClient);
}
HttpPipelinePolicy createAuthenticationPolicy() {
if (this.tokenCredential != null && this.azureKeyCredential != null) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Both 'credential' and 'accessKey' are set. Just one may be used."));
}
if (this.tokenCredential != null) {
return new BearerTokenAuthenticationPolicy(
this.tokenCredential, "https:
} else if (this.azureKeyCredential != null) {
return new HmacAuthenticationPolicy(this.azureKeyCredential);
} else {
throw logger.logExceptionAsError(
new NullPointerException("Missing credential information while building a client."));
}
}
UserAgentPolicy createUserAgentPolicy(
String applicationId, String sdkName, String sdkVersion, Configuration configuration) {
return new UserAgentPolicy(applicationId, sdkName, sdkVersion, configuration);
}
RetryPolicy createRetryPolicy() {
return new RetryPolicy();
}
CookiePolicy createCookiePolicy() {
return new CookiePolicy();
}
HttpLoggingPolicy createHttpLoggingPolicy(HttpLogOptions httpLogOptions) {
return new HttpLoggingPolicy(httpLogOptions);
}
HttpLogOptions createDefaultHttpLogOptions() {
return new HttpLogOptions();
}
// Fail fast with descriptive messages when a required builder field is missing;
// the bare requireNonNull calls previously threw message-less NPEs, inconsistent
// with the messages used by the setters in this class.
private void validateRequiredFields() {
    Objects.requireNonNull(this.endpoint, "'endpoint' cannot be null.");
    // A user-supplied pipeline already carries its own HTTP client.
    if (this.pipeline == null) {
        Objects.requireNonNull(this.httpClient, "'httpClient' cannot be null when no pipeline is set.");
    }
}
private PhoneNumberAdminClientImpl createPhoneNumberAdminClient() {
PhoneNumberAdminClientImplBuilder clientBuilder = new PhoneNumberAdminClientImplBuilder();
return clientBuilder
.endpoint(this.endpoint)
.pipeline(this.createHttpPipeline())
.buildClient();
}
private HttpPipeline createHttpPipeline() {
if (this.pipeline != null) {
return this.pipeline;
}
List<HttpPipelinePolicy> policyList = new ArrayList<>();
policyList.add(this.createAuthenticationPolicy());
policyList.add(this.createUserAgentPolicy(
this.getHttpLogOptions().getApplicationId(),
PROPERTIES.get(SDK_NAME),
PROPERTIES.get(SDK_VERSION),
this.configuration
));
policyList.add(this.createRetryPolicy());
policyList.add(this.createCookiePolicy());
if (this.additionalPolicies.size() > 0) {
policyList.addAll(this.additionalPolicies);
}
policyList.add(this.createHttpLoggingPolicy(this.getHttpLogOptions()));
return new HttpPipelineBuilder()
.policies(policyList.toArray(new HttpPipelinePolicy[0]))
.httpClient(this.httpClient)
.build();
}
private HttpLogOptions getHttpLogOptions() {
if (this.httpLogOptions == null) {
this.httpLogOptions = this.createDefaultHttpLogOptions();
}
return this.httpLogOptions;
}
} |
duplicate, remove this. | public Mono<MsalToken> authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) {
// Try a silent acquisition from the MSAL cache first; if the cached token is absent
// or inside the refresh window, retry with forceRefresh to go back to the STS.
return publicClientApplicationAccessor.getValue()
    .flatMap(pc -> Mono.fromFuture(() -> {
        SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
            new HashSet<>(request.getScopes()));
        if (request.getClaims() != null) {
            ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
            parametersBuilder.claims(customClaimRequest);
            // A claims challenge invalidates any cached token, so force a refresh.
            parametersBuilder.forceRefresh(true);
        }
        if (account != null) {
            parametersBuilder = parametersBuilder.account(account);
        }
        try {
            return pc.acquireTokenSilently(parametersBuilder.build());
        } catch (MalformedURLException e) {
            return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
        }
    }).map(MsalToken::new)
    // Only accept tokens that are not about to expire within REFRESH_OFFSET.
    .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))
    .switchIfEmpty(Mono.fromFuture(() -> {
        SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
            new HashSet<>(request.getScopes())).forceRefresh(true);
        if (request.getClaims() != null) {
            ClaimsRequest customClaimRequest = CustomClaimRequest
                .formatAsClaimsRequest(request.getClaims());
            forceParametersBuilder.claims(customClaimRequest);
        }
        if (account != null) {
            forceParametersBuilder = forceParametersBuilder.account(account);
        }
        // Removed a duplicated claims block that re-applied the same ClaimsRequest twice.
        try {
            return pc.acquireTokenSilently(forceParametersBuilder.build());
        } catch (MalformedURLException e) {
            return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
        }
    }).map(MsalToken::new)));
} | if (request.getClaims() != null) { | public Mono<MsalToken> authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) {
// First attempt: silent acquisition from the MSAL public-client token cache.
return publicClientApplicationAccessor.getValue()
    .flatMap(pc -> Mono.fromFuture(() -> {
        SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
            new HashSet<>(request.getScopes()));
        if (request.getClaims() != null) {
            ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
            parametersBuilder.claims(customClaimRequest);
            // A claims challenge invalidates cached tokens, so force a refresh here.
            parametersBuilder.forceRefresh(true);
        }
        if (account != null) {
            parametersBuilder = parametersBuilder.account(account);
        }
        try {
            return pc.acquireTokenSilently(parametersBuilder.build());
        } catch (MalformedURLException e) {
            return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
        }
    }).map(MsalToken::new)
    // Drop tokens expiring within REFRESH_OFFSET so callers never get a near-dead token.
    .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))
    // Second attempt: no acceptable cached token, so force a refresh against the STS.
    .switchIfEmpty(Mono.fromFuture(() -> {
        SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
            new HashSet<>(request.getScopes())).forceRefresh(true);
        if (request.getClaims() != null) {
            ClaimsRequest customClaimRequest = CustomClaimRequest
                .formatAsClaimsRequest(request.getClaims());
            forceParametersBuilder.claims(customClaimRequest);
        }
        if (account != null) {
            forceParametersBuilder = forceParametersBuilder.account(account);
        }
        try {
            return pc.acquireTokenSilently(forceParametersBuilder.build());
        } catch (MalformedURLException e) {
            return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
        }
    }).map(MsalToken::new)));
} | class IdentityClient {
private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
private static final Random RANDOM = new Random();
private static final String WINDOWS_STARTER = "cmd.exe";
private static final String LINUX_MAC_STARTER = "/bin/sh";
private static final String WINDOWS_SWITCHER = "/c";
private static final String LINUX_MAC_SWITCHER = "-c";
private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
private static final String LINUX_MAC_PROCESS_ERROR_MESSAGE = "(.*)az:(.*)not found";
private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
private static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final String DEFAULT_PUBLIC_CACHE_FILE_NAME = "msal.cache";
private static final String DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME = "msal.confidential.cache";
private static final Path DEFAULT_CACHE_FILE_PATH = Platform.isWindows()
? Paths.get(System.getProperty("user.home"), "AppData", "Local", ".IdentityService")
: Paths.get(System.getProperty("user.home"), ".IdentityService");
private static final String DEFAULT_KEYCHAIN_SERVICE = "Microsoft.Developer.IdentityService";
private static final String DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT = "MSALCache";
private static final String DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT = "MSALConfidentialCache";
private static final String DEFAULT_KEYRING_NAME = "default";
private static final String DEFAULT_KEYRING_SCHEMA = "msal.cache";
private static final String DEFAULT_PUBLIC_KEYRING_ITEM_NAME = DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME = DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_KEYRING_ATTR_NAME = "MsalClientID";
private static final String DEFAULT_KEYRING_ATTR_VALUE = "Microsoft.Developer.IdentityService";
private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
private static final String MSI_ENDPOINT_VERSION = "2017-09-01";
private static final String ADFS_TENANT = "adfs";
private static final String HTTP_LOCALHOST = "http:
private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
private final ClientLogger logger = new ClientLogger(IdentityClient.class);
private final IdentityClientOptions options;
private final String tenantId;
private final String clientId;
private final String clientSecret;
private final InputStream certificate;
private final String certificatePath;
private final String certificatePassword;
private HttpPipelineAdapter httpPipelineAdapter;
private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
/**
* Creates an IdentityClient with the given options.
*
* @param tenantId the tenant ID of the application.
* @param clientId the client ID of the application.
* @param clientSecret the client secret of the application.
* @param certificatePath the path to the PKCS12 or PEM certificate of the application.
* @param certificate the PKCS12 or PEM certificate of the application.
* @param certificatePassword the password protecting the PFX certificate.
* @param isSharedTokenCacheCredential Indicate whether the credential is
* {@link com.azure.identity.SharedTokenCacheCredential} or not.
* @param options the options configuring the client.
*/
IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath,
    InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
    IdentityClientOptions options) {
    // "organizations" is the default AAD tenant alias used when none is supplied.
    if (tenantId == null) {
        tenantId = "organizations";
    }
    // Fall back to default client options rather than failing on null.
    if (options == null) {
        options = new IdentityClientOptions();
    }
    this.tenantId = tenantId;
    this.clientId = clientId;
    this.clientSecret = clientSecret;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    this.options = options;
    // MSAL application objects are built lazily through these accessors; presumably
    // SynchronizedAccessor serializes first use — confirm against its implementation.
    this.publicClientApplicationAccessor = new SynchronizedAccessor<PublicClientApplication>(() ->
        getPublicClientApplication(isSharedTokenCacheCredential));
    this.confidentialClientApplicationAccessor = new SynchronizedAccessor<ConfidentialClientApplication>(() ->
        getConfidentialClientApplication());
}
private ConfidentialClientApplication getConfidentialClientApplication() {
if (clientId == null) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
InputStream pfxCertificateStream = getCertificateInputStream();
credential = ClientCredentialFactory.createFromCertificate(
pfxCertificateStream, certificatePassword);
}
} catch (IOException | GeneralSecurityException e) {
throw logger.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path"));
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw logger.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
if (options.isSharedTokenCacheEnabled()) {
try {
PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
if (Platform.isMac()) {
persistenceSettingsBuilder.setMacKeychain(
DEFAULT_KEYCHAIN_SERVICE, DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT);
}
if (Platform.isLinux()) {
try {
persistenceSettingsBuilder
.setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME,
DEFAULT_KEYRING_ATTR_VALUE, null, null);
applicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
} catch (KeyRingAccessException e) {
if (!options.getAllowUnencryptedCache()) {
throw logger.logExceptionAsError(e);
}
persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
applicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
}
}
} catch (Throwable t) {
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
return applicationBuilder.build();
}
/**
 * Creates the MSAL {@link PublicClientApplication} used by the user-interactive authentication
 * flows (device code, browser, username/password, shared token cache).
 * <p>
 * Configures the authority URL, the HTTP layer (azure-core pipeline adapter or a java.net
 * proxy), an optional executor service, CAE client capabilities, and - when the shared token
 * cache is enabled - an OS-specific persistent token cache.
 *
 * @param sharedTokenCacheCredential whether the caller is the shared-token-cache credential;
 *     controls which exception type is thrown when the persisted cache cannot be used
 * @return the configured public client application
 */
private PublicClientApplication getPublicClientApplication(boolean sharedTokenCacheCredential) {
    if (clientId == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A non-null value for client ID must be provided for user authentication."));
    }
    // Strip trailing slashes from the authority host before appending the tenant segment.
    String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
    PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId);
    try {
        publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl);
    } catch (MalformedURLException e) {
        throw logger.logExceptionAsWarning(new IllegalStateException(e));
    }
    // Route MSAL's HTTP traffic through the azure-core pipeline when one could be built;
    // otherwise fall back to a plain java.net proxy for MSAL's own HTTP client.
    initializeHttpPipelineAdapter();
    if (httpPipelineAdapter != null) {
        publicClientApplicationBuilder.httpClient(httpPipelineAdapter);
    } else {
        publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
    }
    if (options.getExecutorService() != null) {
        publicClientApplicationBuilder.executorService(options.getExecutorService());
    }
    // "CP1" advertises client support for claims challenges (Continuous Access Evaluation).
    Set<String> set = new HashSet<>(1);
    set.add("CP1");
    publicClientApplicationBuilder.clientCapabilities(set);
    if (options.isSharedTokenCacheEnabled()) {
        try {
            PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
                DEFAULT_PUBLIC_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
            if (Platform.isWindows()) {
                // Windows: rely on the persistence library's default file protection.
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isMac()) {
                persistenceSettingsBuilder.setMacKeychain(
                    DEFAULT_KEYCHAIN_SERVICE, DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT);
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isLinux()) {
                try {
                    persistenceSettingsBuilder
                        .setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
                            DEFAULT_PUBLIC_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME, DEFAULT_KEYRING_ATTR_VALUE,
                            null, null);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                } catch (KeyRingAccessException e) {
                    // No usable keyring: only degrade to an unencrypted file cache if the
                    // user explicitly opted in; otherwise surface the keyring failure.
                    if (!options.getAllowUnencryptedCache()) {
                        throw logger.logExceptionAsError(e);
                    }
                    persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                }
            }
        } catch (Throwable t) {
            String message = "Shared token cache is unavailable in this environment.";
            if (sharedTokenCacheCredential) {
                throw logger.logExceptionAsError(new CredentialUnavailableException(message, t));
            } else {
                throw logger.logExceptionAsError(new ClientAuthenticationException(message, null, t));
            }
        }
    }
    return publicClientApplicationBuilder.build();
}
/**
 * Asynchronously acquire a token using the credentials cached by the Azure Tools for IntelliJ
 * plugin.
 * <p>
 * Supports two cached auth methods: {@code "SP"} (service principal details stored in a file)
 * and {@code "DC"} (device-code login with a cached refresh token). Any other method is
 * reported as unavailable.
 *
 * @param request the details of the token request
 * @return a Publisher that emits a MsalToken
 */
public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) {
    try {
        IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath());
        IntelliJAuthMethodDetails authDetails = cacheAccessor.getAuthDetailsIfAvailable();
        String authType = authDetails.getAuthMethod();
        if (authType.equalsIgnoreCase("SP")) {
            // Service-principal login: rebuild a confidential client from the cached details.
            Map<String, String> spDetails = cacheAccessor
                .getIntellijServicePrincipalDetails(authDetails.getCredFilePath());
            String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant");
            try {
                ConfidentialClientApplication.Builder applicationBuilder =
                    ConfidentialClientApplication.builder(spDetails.get("client"),
                        ClientCredentialFactory.createFromSecret(spDetails.get("key")))
                        .authority(authorityUrl);
                if (httpPipelineAdapter != null) {
                    applicationBuilder.httpClient(httpPipelineAdapter);
                } else if (options.getProxyOptions() != null) {
                    applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
                }
                if (options.getExecutorService() != null) {
                    applicationBuilder.executorService(options.getExecutorService());
                }
                ConfidentialClientApplication application = applicationBuilder.build();
                return Mono.fromFuture(application.acquireToken(
                    ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
                        .build())).map(MsalToken::new);
            } catch (MalformedURLException e) {
                return Mono.error(e);
            }
        } else if (authType.equalsIgnoreCase("DC")) {
            // Device-code login: redeem the cached refresh token through the public client.
            if (isADFSTenant()) {
                return Mono.error(new CredentialUnavailableException("IntelliJCredential "
                    + "authentication unavailable. ADFS tenant/authorities are not supported."));
            }
            JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials();
            String refreshToken = intelliJCredentials.get("refreshToken").textValue();
            RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder =
                RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken);
            // Propagate any CAE claims challenge supplied on the request.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
                refreshTokenParametersBuilder.claims(customClaimRequest);
            }
            return publicClientApplicationAccessor.getValue()
                .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build()))
                    .map(MsalToken::new));
        } else {
            throw logger.logExceptionAsError(new CredentialUnavailableException(
                "IntelliJ Authentication not available."
                + " Please login with Azure Tools for IntelliJ plugin in the IDE."));
        }
    } catch (IOException e) {
        return Mono.error(e);
    }
}
/**
 * Asynchronously acquire a token from Active Directory with Azure CLI.
 * <p>
 * Shells out to {@code az account get-access-token} from a known-safe working directory and
 * parses the JSON the CLI prints on stdout.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) {
    String scopes = ScopeUtil.scopesToResource(request.getScopes());
    try {
        ScopeUtil.validateScope(scopes);
    } catch (IllegalArgumentException ex) {
        return Mono.error(logger.logExceptionAsError(ex));
    }
    String command = "az account get-access-token --output json --resource " + scopes;
    AccessToken token;
    try {
        // Pick the platform shell used to launch the CLI.
        String starter;
        String switcher;
        if (isWindowsPlatform()) {
            starter = WINDOWS_STARTER;
            switcher = WINDOWS_SWITCHER;
        } else {
            starter = LINUX_MAC_STARTER;
            switcher = LINUX_MAC_SWITCHER;
        }
        ProcessBuilder builder = new ProcessBuilder(starter, switcher, command);
        // Run from a trusted directory so a malicious 'az' in the CWD cannot be picked up.
        String workingDirectory = getSafeWorkingDirectory();
        if (workingDirectory != null) {
            builder.directory(new File(workingDirectory));
        } else {
            throw logger.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
                + " found to execute CLI command from."));
        }
        builder.redirectErrorStream(true);
        Process process = builder.start();
        StringBuilder output = new StringBuilder();
        // try-with-resources guarantees the process stream is closed on every exit path.
        try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
                    || line.matches(LINUX_MAC_PROCESS_ERROR_MESSAGE)) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "AzureCliCredential authentication unavailable. Azure CLI not installed"));
                }
                output.append(line);
            }
        }
        String processOutput = output.toString();
        // Bound the wait; calling exitValue() on a still-running process would throw a
        // confusing IllegalThreadStateException, so report the timeout explicitly.
        if (!process.waitFor(10, TimeUnit.SECONDS)) {
            throw logger.logExceptionAsError(
                new IllegalStateException("Timed out waiting for the Azure CLI to respond."));
        }
        if (process.exitValue() != 0) {
            if (processOutput.length() > 0) {
                // Redact any token that made it into the failure output before logging it.
                String redactedOutput = redactInfo("\"accessToken\": \"(.*?)(\"|$)", processOutput);
                if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "AzureCliCredential authentication unavailable."
                                + " Please run 'az login' to set up account"));
                }
                throw logger.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
            } else {
                throw logger.logExceptionAsError(
                    new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
            }
        }
        Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
            SerializerEncoding.JSON);
        String accessToken = objectMap.get("accessToken");
        // 'expiresOn' is assumed to be a local wall-clock timestamp with a fractional-second
        // suffix; strip the fraction, parse as local time, then normalize to UTC.
        String time = objectMap.get("expiresOn");
        String timeToSecond = time.substring(0, time.indexOf("."));
        String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
        OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
            .atZone(ZoneId.systemDefault())
            .toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
        token = new AccessToken(accessToken, expiresOn);
    } catch (InterruptedException e) {
        // Restore the interrupt status before converting to an unchecked failure.
        Thread.currentThread().interrupt();
        throw logger.logExceptionAsError(new IllegalStateException(e));
    } catch (IOException e) {
        throw logger.logExceptionAsError(new IllegalStateException(e));
    } catch (RuntimeException e) {
        return Mono.error(logger.logExceptionAsError(e));
    }
    return Mono.just(token);
}
/**
 * Asynchronously acquire a token from Active Directory with a client secret.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> {
            // Build the client-credentials grant parameters once per subscription.
            ClientCredentialParameters parameters =
                ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build();
            return Mono.fromFuture(() -> confidentialClient.acquireToken(parameters))
                .map(MsalToken::new);
        });
}
/**
 * Wraps the given HttpClient in a minimal azure-core pipeline: standard before/after retry
 * policies, a RetryPolicy, and default HTTP logging.
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    return new HttpPipelineBuilder()
        .httpClient(httpClient)
        .policies(pipelinePolicies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
/**
 * Asynchronously acquire a token from Active Directory with a username and a password.
 *
 * @param request the details of the token request
 * @param username the username of the user
 * @param password the password of the user
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request,
                                                        String username, String password) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> {
            UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
                UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()),
                    username, password.toCharArray());
            // Propagate any CAE claims challenge supplied on the request.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest
                    .formatAsClaimsRequest(request.getClaims());
                userNamePasswordParametersBuilder.claims(customClaimRequest);
            }
            return pc.acquireToken(userNamePasswordParametersBuilder.build());
        }
    )).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and "
        + "password", null, t)).map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from the confidential client's local token cache, if one is
 * present and not close to expiry.
 *
 * @param request the details of the token request
 * @return a Publisher that emits the cached AccessToken, or completes empty when no cached
 *     token is usable (absent, or within the refresh offset of expiring)
 */
public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> Mono.fromFuture(() -> {
            SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
                new HashSet<>(request.getScopes()));
            try {
                return confidentialClient.acquireTokenSilently(parametersBuilder.build());
            } catch (MalformedURLException e) {
                return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
            }
        }).map(ar -> (AccessToken) new MsalToken(ar))
            // Treat tokens inside the refresh window as absent so callers fetch a fresh one.
            .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET))));
}
/**
 * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide
 * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a
 * different device.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device
 * code expires
 */
public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request,
                                                  Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    return publicClientApplicationAccessor.getValue().flatMap(pc ->
        Mono.fromFuture(() -> {
            // Adapt MSAL's device-code callback into the SDK's DeviceCodeInfo type before
            // handing it to the user-supplied consumer.
            DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
                DeviceCodeFlowParameters.builder(
                    new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
                        new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
                            OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())));
            // Propagate any CAE claims challenge supplied on the request.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
                parametersBuilder.claims(customClaimRequest);
            }
            return pc.acquireToken(parametersBuilder.build());
        }).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code", null, t))
            .map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with the Visual Studio Code cached refresh token.
 *
 * @param request the details of the token request
 * @param cloud the Azure cloud name whose VS Code cached credential should be used
 * @return a Publisher that emits an AccessToken.
 */
public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) {
    if (isADFSTenant()) {
        return Mono.error(new CredentialUnavailableException("VsCodeCredential "
            + "authentication unavailable. ADFS tenant/authorities are not supported."));
    }
    // Read the refresh token VS Code stored in the OS credential store ("VS Code Azure").
    VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor();
    String credential = accessor.getCredentials("VS Code Azure", cloud);
    RefreshTokenParameters.RefreshTokenParametersBuilder parametersBuilder = RefreshTokenParameters
        .builder(new HashSet<>(request.getScopes()), credential);
    // Propagate any CAE claims challenge supplied on the request.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
    }
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(pc.acquireToken(parametersBuilder.build())).map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow.
 *
 * @param request the details of the token request
 * @param authorizationCode the oauth2 authorization code
 * @param redirectUrl the redirectUrl where the authorization code is sent to
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode,
                                                         URI redirectUrl) {
    AuthorizationCodeParameters.AuthorizationCodeParametersBuilder parametersBuilder =
        AuthorizationCodeParameters.builder(authorizationCode, redirectUrl)
            .scopes(new HashSet<>(request.getScopes()));
    // Propagate any CAE claims challenge supplied on the request.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
    }
    // A configured client secret means this is a confidential (web-app style) client;
    // otherwise redeem the code through the public client.
    Mono<IAuthenticationResult> acquireToken;
    if (clientSecret != null) {
        acquireToken = confidentialClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build())));
    } else {
        acquireToken = publicClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build())));
    }
    return acquireToken.onErrorMap(t -> new ClientAuthenticationException(
        "Failed to acquire token with authorization code", null, t)).map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The
 * credential will run a minimal local HttpServer at the given port, so {@code http://localhost:{port}} must be
 * listed as a valid reply URL for the application.
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening
 * @param redirectUrl the fully qualified reply URL to use instead of a localhost port
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
                                                          String redirectUrl) {
    URI redirectUri;
    String redirect;
    // Redirect precedence: explicit port (localhost) > explicit redirect URL > bare localhost.
    if (port != null) {
        redirect = HTTP_LOCALHOST + ":" + port;
    } else if (redirectUrl != null) {
        redirect = redirectUrl;
    } else {
        redirect = HTTP_LOCALHOST;
    }
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        return Mono.error(logger.logExceptionAsError(new RuntimeException(e)));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
        InteractiveRequestParameters.builder(redirectUri).scopes(new HashSet<>(request.getScopes()));
    // Propagate any CAE claims challenge supplied on the request.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        builder.claims(customClaimRequest);
    }
    Mono<IAuthenticationResult> acquireToken = publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build())));
    return acquireToken.onErrorMap(t -> new ClientAuthenticationException(
        "Failed to acquire token with Interactive Browser Authentication.", null, t)).map(MsalToken::new);
}
/**
 * Gets a token for the given user (or the only cached user) from the MSAL shared token cache.
 * <p>
 * All "cannot pick an account" failures are surfaced as {@link CredentialUnavailableException}
 * - consistent with the empty-cache case - so chained credentials can fall through to the next
 * credential. (Previously some paths threw a bare RuntimeException, which callers treating
 * CredentialUnavailableException as "try the next credential" would not recognize.)
 *
 * @param request the details of the token request
 * @param username the username whose cached account should be used, or null to accept a
 *     single unambiguous cached account
 * @return a Publisher that emits a MsalToken
 */
public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts())
            .onErrorMap(t -> new CredentialUnavailableException(
                "Cannot get accounts from token cache. Error: " + t.getMessage(), t))
            .flatMap(set -> {
                IAccount requestedAccount;
                // De-duplicate matching accounts by home account id.
                Map<String, IAccount> accounts = new HashMap<>();
                if (set.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException("SharedTokenCacheCredential "
                        + "authentication unavailable. No accounts were found in the cache."));
                }
                for (IAccount cached : set) {
                    if (username == null || username.equals(cached.username())) {
                        if (!accounts.containsKey(cached.homeAccountId())) {
                            accounts.put(cached.homeAccountId(), cached);
                        }
                    }
                }
                if (accounts.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException(String.format(
                        "SharedTokenCacheCredential "
                        + "authentication unavailable. No account matching the specified username: %s was "
                        + "found in the cache.", username)));
                } else if (accounts.size() > 1) {
                    if (username == null) {
                        return Mono.error(new CredentialUnavailableException(
                            "SharedTokenCacheCredential authentication "
                            + "unavailable. Multiple accounts were found in the cache. Use username and "
                            + "tenant id to disambiguate."));
                    } else {
                        return Mono.error(new CredentialUnavailableException(String.format(
                            "SharedTokenCacheCredential "
                            + "authentication unavailable. Multiple accounts matching the specified username: "
                            + "%s were found in the cache.", username)));
                    }
                } else {
                    requestedAccount = accounts.values().iterator().next();
                }
                // Exactly one matching account: silently acquire a token for it.
                return authenticateWithPublicClientCache(request, requestedAccount);
            }));
}
/**
 * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint.
 * <p>
 * Azure Arc uses a challenge scheme: the first, unauthenticated request is expected to fail
 * with 401 and a {@code WWW-Authenticate} header that points at a local file; the file's
 * contents are then presented as a Basic credential on the second request.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint,
                                                                  TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode("2019-11-01", "UTF-8"));
        URL url = new URL(String.format("%s?%s", identityEndpoint, payload));
        String secretKey = null;
        try {
            // Challenge request: intentionally unauthenticated; reading the response is
            // expected to throw, landing in the catch block below with a 401.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            // NOTE(review): this Scanner is discarded and never closed; it appears to exist
            // only to force the response body to be read - confirm intent.
            new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
        } catch (IOException e) {
            if (connection == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Failed to initialize "
                    + "Http URL connection to the endpoint.",
                    null, e));
            }
            // Anything other than the expected 401 challenge is a hard failure.
            int status = connection.getResponseCode();
            if (status != 401) {
                throw logger.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401"
                    + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status),
                    null, e));
            }
            String realm = connection.getHeaderField("WWW-Authenticate");
            if (realm == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            // The header is "<scheme>=<path-to-secret-file>"; everything after '=' is the path.
            int separatorIndex = realm.indexOf("=");
            if (separatorIndex == -1) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            String secretKeyPath = realm.substring(separatorIndex + 1);
            secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        if (secretKey == null) {
            throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value"
                + " in the response from Azure Arc Managed Identity Endpoint",
                null));
        }
        try {
            // Authenticated request: replay the call with the challenge secret as Basic auth.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey));
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner scanner = new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
            String result = scanner.hasNext() ? scanner.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity endpoint.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param thumbprint the thumbprint of the certificate the endpoint presents; the TLS layer is
 *     pinned to it because Service Fabric uses a self-signed endpoint certificate
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint,
                                                                            String identityHeader,
                                                                            String thumbprint,
                                                                            TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpsURLConnection connection = null;
        String endpoint = identityEndpoint;
        String headerValue = identityHeader;
        String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION;
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            payload.append("&client_id=");
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpsURLConnection) url.openConnection();
            // Pin the connection to the expected endpoint certificate thumbprint.
            IdentitySslUtil.addTrustedCertificateThumbprint(getClass().getSimpleName(), connection,
                thumbprint);
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                connection.setRequestProperty("Secret", headerValue);
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the App Service Managed Service Identity endpoint.
 * <p>
 * Prefers the newer IDENTITY_ENDPOINT/IDENTITY_HEADER pair when available and falls back to
 * the legacy MSI_ENDPOINT/MSI_SECRET pair; the two protocol versions differ in the secret
 * header name and the client-id query parameter name.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param msiEndpoint the MSI endpoint to acquire token from
 * @param msiSecret the msi secret to acquire token with
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader,
                                                               String msiEndpoint, String msiSecret,
                                                               TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        String endpoint;
        String headerValue;
        String endpointVersion;
        // New-style endpoint wins when present; otherwise use the legacy MSI variables.
        if (identityEndpoint != null) {
            endpoint = identityEndpoint;
            headerValue = identityHeader;
            endpointVersion = IDENTITY_ENDPOINT_VERSION;
        } else {
            endpoint = msiEndpoint;
            headerValue = msiSecret;
            endpointVersion = MSI_ENDPOINT_VERSION;
        }
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            // The two API versions spell the client id parameter differently.
            if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                payload.append("&client_id=");
            } else {
                payload.append("&clientid=");
            }
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                // Secret header name also differs between the two API versions.
                if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                    connection.setRequestProperty("X-IDENTITY-HEADER", headerValue);
                } else {
                    connection.setRequestProperty("Secret", headerValue);
                }
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
* Asynchronously acquire a token from the Virtual Machine IMDS endpoint.
*
* @param request the details of the token request
* @return a Publisher that emits an AccessToken
*/
public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) {
String resource = ScopeUtil.scopesToResource(request.getScopes());
StringBuilder payload = new StringBuilder();
final int imdsUpgradeTimeInMs = 70 * 1000;
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
payload.append("&resource=");
payload.append(URLEncoder.encode(resource, "UTF-8"));
if (clientId != null) {
payload.append("&client_id=");
payload.append(URLEncoder.encode(clientId, "UTF-8"));
}
} catch (IOException exception) {
return Mono.error(exception);
}
return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> {
int retry = 1;
while (retry <= options.getMaxRetry()) {
URL url = null;
HttpURLConnection connection = null;
try {
url =
new URL(String.format("http:
payload.toString()));
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setRequestProperty("Metadata", "true");
connection.connect();
Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
.useDelimiter("\\A");
String result = s.hasNext() ? s.next() : "";
return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
} catch (IOException exception) {
if (connection == null) {
throw logger.logExceptionAsError(new RuntimeException(
String.format("Could not connect to the url: %s.", url), exception));
}
int responseCode;
try {
responseCode = connection.getResponseCode();
} catch (Exception e) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
}
if (responseCode == 400) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established.", null));
}
if (responseCode == 410
|| responseCode == 429
|| responseCode == 404
|| (responseCode >= 500 && responseCode <= 599)) {
int retryTimeoutInMs = options.getRetryTimeout()
.apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000;
retryTimeoutInMs =
(responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? imdsUpgradeTimeInMs
: retryTimeoutInMs;
retry++;
if (retry > options.getMaxRetry()) {
break;
} else {
sleep(retryTimeoutInMs);
}
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Couldn't acquire access token from IMDS, verify your objectId, "
+ "clientId or msiResourceId", exception));
}
} finally {
if (connection != null) {
connection.disconnect();
}
}
}
throw logger.logExceptionAsError(new RuntimeException(
String.format("MSI: Failed to acquire tokens after retrying %s times",
options.getMaxRetry())));
}));
}
private Mono<Boolean> checkIMDSAvailable() {
StringBuilder payload = new StringBuilder();
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
} catch (IOException exception) {
return Mono.error(exception);
}
return Mono.fromCallable(() -> {
HttpURLConnection connection = null;
URL url = new URL(String.format("http:
payload.toString()));
try {
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setConnectTimeout(500);
connection.connect();
} catch (Exception e) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
} finally {
if (connection != null) {
connection.disconnect();
}
}
return true;
});
}
/**
 * Sleeps for the given number of milliseconds, used for IMDS retry backoff.
 *
 * @param millis milliseconds to sleep
 * @throws IllegalStateException if the thread is interrupted while sleeping
 */
private static void sleep(int millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException ex) {
        // Restore the interrupt flag so callers up the stack can observe the interruption.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(ex);
    }
}
/**
 * Converts azure-core proxy options to a java.net Proxy: SOCKS4/SOCKS5 map to SOCKS,
 * everything else (including HTTP) maps to HTTP.
 */
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    Type proxyType;
    switch (options.getType()) {
        case SOCKS4:
        case SOCKS5:
            proxyType = Type.SOCKS;
            break;
        case HTTP:
        default:
            proxyType = Type.HTTP;
            break;
    }
    return new Proxy(proxyType, options.getAddress());
}
/**
 * Returns a trusted directory to launch the Azure CLI from, or null when none can be
 * determined (Windows with no system root configured).
 */
private String getSafeWorkingDirectory() {
    if (!isWindowsPlatform()) {
        return DEFAULT_MAC_LINUX_PATH;
    }
    return CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)
        ? null
        : DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32";
}
// True when the JVM reports a Windows operating system.
private boolean isWindowsPlatform() {
    String osName = System.getProperty("os.name");
    return osName.contains("Windows");
}
// Masks every substring matching the pattern so secrets (e.g. access tokens) never
// reach logs or exception messages.
private String redactInfo(String regex, String input) {
    final String mask = "****";
    return input.replaceAll(regex, mask);
}
/**
 * Opens the given URL in the platform's default browser, logging an error when the
 * platform is not recognized.
 * <p>
 * Uses the {@code Runtime.exec(String[])} overload so the URL is passed as a single
 * argument instead of being tokenized on whitespace by {@code exec(String)}.
 *
 * @param url the URL to open
 * @throws IOException if launching the browser process fails
 */
void openUrl(String url) throws IOException {
    Runtime rt = Runtime.getRuntime();
    String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
    if (os.contains("win")) {
        rt.exec(new String[] {"rundll32", "url.dll,FileProtocolHandler", url});
    } else if (os.contains("mac")) {
        rt.exec(new String[] {"open", url});
    } else if (os.contains("nix") || os.contains("nux")) {
        rt.exec(new String[] {"xdg-open", url});
    } else {
        logger.error("Browser could not be opened - please open {} in a browser on this device.", url);
    }
}
// Builds an already-failed future so synchronous errors can be surfaced through
// async (CompletableFuture-based) call sites.
private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) {
    CompletableFuture<IAuthenticationResult> failedFuture = new CompletableFuture<>();
    failedFuture.completeExceptionally(e);
    return failedFuture;
}
/**
 * Initializes {@code httpPipelineAdapter} so MSAL routes HTTP traffic through the SDK stack.
 * Preference order: a fully configured pipeline, then a user-supplied {@code HttpClient},
 * then a default client — but only when no proxy is configured (a proxy is instead handed
 * directly to MSAL by the callers of this method).
 */
private void initializeHttpPipelineAdapter() {
    HttpPipeline configuredPipeline = options.getHttpPipeline();
    if (configuredPipeline != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(configuredPipeline);
        return;
    }
    HttpClient configuredClient = options.getHttpClient();
    if (configuredClient != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(configuredClient));
        return;
    }
    if (options.getProxyOptions() == null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()));
    }
}
/**
 * Returns the tenant id this client authenticates against.
 *
 * @return the tenant id.
 */
public String getTenantId() {
    return this.tenantId;
}
/**
 * Returns the client (application) id this client authenticates as.
 *
 * @return the client id.
 */
public String getClientId() {
    return this.clientId;
}
/** Whether this client targets the special ADFS "tenant" (on-premises authority). */
private boolean isADFSTenant() {
    String currentTenant = this.tenantId;
    return currentTenant.equals(ADFS_TENANT);
}
/**
 * Reads the configured certificate into a byte array: from {@code certificatePath} when a
 * path was given, otherwise by draining the {@code certificate} stream.
 *
 * @return the certificate bytes, or an empty array when neither source is configured
 * @throws IOException if reading the file or stream fails
 */
private byte[] getCertificateBytes() throws IOException {
    if (certificatePath != null) {
        return Files.readAllBytes(Paths.get(certificatePath));
    }
    if (certificate == null) {
        return new byte[0];
    }
    // Drain the stream in 1 KiB chunks.
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    for (int count = certificate.read(chunk, 0, chunk.length); count != -1;
            count = certificate.read(chunk, 0, chunk.length)) {
        collected.write(chunk, 0, count);
    }
    return collected.toByteArray();
}
/**
 * Returns a stream over the configured certificate: a fresh {@link FileInputStream} when a
 * path was given, otherwise the user-supplied stream (may be {@code null} when neither was
 * configured).
 *
 * @return the certificate stream, or {@code null}
 * @throws IOException if the certificate file cannot be opened
 */
private InputStream getCertificateInputStream() throws IOException {
    if (certificatePath != null) {
        return new FileInputStream(certificatePath);
    }
    return certificate;
}
} | class IdentityClient {
private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
private static final Random RANDOM = new Random();
private static final String WINDOWS_STARTER = "cmd.exe";
private static final String LINUX_MAC_STARTER = "/bin/sh";
private static final String WINDOWS_SWITCHER = "/c";
private static final String LINUX_MAC_SWITCHER = "-c";
private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
private static final String LINUX_MAC_PROCESS_ERROR_MESSAGE = "(.*)az:(.*)not found";
private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
private static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final String DEFAULT_PUBLIC_CACHE_FILE_NAME = "msal.cache";
private static final String DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME = "msal.confidential.cache";
private static final Path DEFAULT_CACHE_FILE_PATH = Platform.isWindows()
? Paths.get(System.getProperty("user.home"), "AppData", "Local", ".IdentityService")
: Paths.get(System.getProperty("user.home"), ".IdentityService");
private static final String DEFAULT_KEYCHAIN_SERVICE = "Microsoft.Developer.IdentityService";
private static final String DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT = "MSALCache";
private static final String DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT = "MSALConfidentialCache";
private static final String DEFAULT_KEYRING_NAME = "default";
private static final String DEFAULT_KEYRING_SCHEMA = "msal.cache";
private static final String DEFAULT_PUBLIC_KEYRING_ITEM_NAME = DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME = DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_KEYRING_ATTR_NAME = "MsalClientID";
private static final String DEFAULT_KEYRING_ATTR_VALUE = "Microsoft.Developer.IdentityService";
private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
private static final String MSI_ENDPOINT_VERSION = "2017-09-01";
private static final String ADFS_TENANT = "adfs";
private static final String HTTP_LOCALHOST = "http:
private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
private final ClientLogger logger = new ClientLogger(IdentityClient.class);
private final IdentityClientOptions options;
private final String tenantId;
private final String clientId;
private final String clientSecret;
private final InputStream certificate;
private final String certificatePath;
private final String certificatePassword;
private HttpPipelineAdapter httpPipelineAdapter;
private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
/**
 * Creates an IdentityClient with the given options.
 *
 * @param tenantId the tenant ID of the application; defaults to "organizations" when null.
 * @param clientId the client ID of the application.
 * @param clientSecret the client secret of the application.
 * @param certificatePath the path to the PKCS12 or PEM certificate of the application.
 * @param certificate the PKCS12 or PEM certificate of the application.
 * @param certificatePassword the password protecting the PFX certificate.
 * @param isSharedTokenCacheCredential Indicate whether the credential is
 * {@link com.azure.identity.SharedTokenCacheCredential} or not.
 * @param options the options configuring the client; a default instance is used when null.
 */
IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath,
               InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
               IdentityClientOptions options) {
    this.tenantId = (tenantId == null) ? "organizations" : tenantId;
    this.options = (options == null) ? new IdentityClientOptions() : options;
    this.clientId = clientId;
    this.clientSecret = clientSecret;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    // MSAL application objects are expensive to build; create them lazily, at most once.
    this.publicClientApplicationAccessor = new SynchronizedAccessor<>(() ->
        getPublicClientApplication(isSharedTokenCacheCredential));
    this.confidentialClientApplicationAccessor = new SynchronizedAccessor<>(() ->
        getConfidentialClientApplication());
}
/**
 * Builds the MSAL {@code ConfidentialClientApplication} used for service-principal flows.
 *
 * <p>Credential material is chosen in priority order: client secret, then a PEM certificate
 * (no password configured), then a password-protected PFX certificate. The authority URL is
 * composed from the configured authority host and the tenant id.</p>
 *
 * @return the configured confidential client application
 * @throws IllegalArgumentException if no client id, or neither a secret nor a certificate,
 * was supplied
 */
private ConfidentialClientApplication getConfidentialClientApplication() {
    if (clientId == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A non-null value for client ID must be provided for user authentication."));
    }
    // Strip trailing slashes from the authority host before appending the tenant segment.
    String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
    IClientCredential credential;
    if (clientSecret != null) {
        credential = ClientCredentialFactory.createFromSecret(clientSecret);
    } else if (certificate != null || certificatePath != null) {
        try {
            if (certificatePassword == null) {
                // No password => PEM: extract the private key plus one-or-more public certs.
                byte[] pemCertificateBytes = getCertificateBytes();
                List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
                PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
                if (x509CertificateList.size() == 1) {
                    credential = ClientCredentialFactory.createFromCertificate(
                        privateKey, x509CertificateList.get(0));
                } else {
                    // The PEM contained a chain; hand MSAL the whole chain.
                    credential = ClientCredentialFactory.createFromCertificateChain(
                        privateKey, x509CertificateList);
                }
            } else {
                // Password present => PKCS12/PFX stream.
                InputStream pfxCertificateStream = getCertificateInputStream();
                credential = ClientCredentialFactory.createFromCertificate(
                    pfxCertificateStream, certificatePassword);
            }
        } catch (IOException | GeneralSecurityException e) {
            throw logger.logExceptionAsError(new RuntimeException(
                "Failed to parse the certificate for the credential: " + e.getMessage(), e));
        }
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Must provide client secret or client certificate path"));
    }
    ConfidentialClientApplication.Builder applicationBuilder =
        ConfidentialClientApplication.builder(clientId, credential);
    try {
        applicationBuilder = applicationBuilder.authority(authorityUrl);
    } catch (MalformedURLException e) {
        throw logger.logExceptionAsWarning(new IllegalStateException(e));
    }
    applicationBuilder.sendX5c(options.isIncludeX5c());
    // Prefer routing MSAL's HTTP traffic through the SDK pipeline when one is available;
    // otherwise fall back to handing MSAL the raw proxy settings.
    initializeHttpPipelineAdapter();
    if (httpPipelineAdapter != null) {
        applicationBuilder.httpClient(httpPipelineAdapter);
    } else {
        applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
    }
    if (options.getExecutorService() != null) {
        applicationBuilder.executorService(options.getExecutorService());
    }
    if (options.isSharedTokenCacheEnabled()) {
        try {
            PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
                DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
            if (Platform.isMac()) {
                persistenceSettingsBuilder.setMacKeychain(
                    DEFAULT_KEYCHAIN_SERVICE, DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT);
            }
            // NOTE(review): unlike getPublicClientApplication, setTokenCacheAccessAspect is
            // only registered on Linux here — confirm Mac/Windows persistence is intentional.
            if (Platform.isLinux()) {
                try {
                    persistenceSettingsBuilder
                        .setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
                            DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME,
                            DEFAULT_KEYRING_ATTR_VALUE, null, null);
                    applicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                } catch (KeyRingAccessException e) {
                    // Keyring unavailable: only fall back to an unencrypted file cache when
                    // the user explicitly opted in.
                    if (!options.getAllowUnencryptedCache()) {
                        throw logger.logExceptionAsError(e);
                    }
                    persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
                    applicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                }
            }
        } catch (Throwable t) {
            throw logger.logExceptionAsError(new ClientAuthenticationException(
                "Shared token cache is unavailable in this environment.", null, t));
        }
    }
    return applicationBuilder.build();
}
/**
 * Builds the MSAL {@code PublicClientApplication} used for user (delegated) auth flows.
 *
 * @param sharedTokenCacheCredential whether the caller is SharedTokenCacheCredential, which
 * changes how a token-cache setup failure is surfaced (unavailable vs. hard failure)
 * @return the configured public client application
 * @throws IllegalArgumentException if no client id was supplied
 */
private PublicClientApplication getPublicClientApplication(boolean sharedTokenCacheCredential) {
    if (clientId == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A non-null value for client ID must be provided for user authentication."));
    }
    // Strip trailing slashes from the authority host before appending the tenant segment.
    String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
    PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId);
    try {
        publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl);
    } catch (MalformedURLException e) {
        throw logger.logExceptionAsWarning(new IllegalStateException(e));
    }
    // Prefer the SDK HTTP pipeline over MSAL's own HTTP stack when one is available.
    initializeHttpPipelineAdapter();
    if (httpPipelineAdapter != null) {
        publicClientApplicationBuilder.httpClient(httpPipelineAdapter);
    } else {
        publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
    }
    if (options.getExecutorService() != null) {
        publicClientApplicationBuilder.executorService(options.getExecutorService());
    }
    // Advertise the "CP1" client capability to the service.
    Set<String> set = new HashSet<>(1);
    set.add("CP1");
    publicClientApplicationBuilder.clientCapabilities(set);
    if (options.isSharedTokenCacheEnabled()) {
        try {
            // Persist the cache per-platform: plain file on Windows, keychain on macOS,
            // keyring (with opt-in unencrypted-file fallback) on Linux.
            PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
                DEFAULT_PUBLIC_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
            if (Platform.isWindows()) {
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isMac()) {
                persistenceSettingsBuilder.setMacKeychain(
                    DEFAULT_KEYCHAIN_SERVICE, DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT);
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isLinux()) {
                try {
                    persistenceSettingsBuilder
                        .setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
                            DEFAULT_PUBLIC_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME, DEFAULT_KEYRING_ATTR_VALUE,
                            null, null);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                } catch (KeyRingAccessException e) {
                    // Keyring unavailable: only fall back to an unencrypted file cache when
                    // the user explicitly opted in.
                    if (!options.getAllowUnencryptedCache()) {
                        throw logger.logExceptionAsError(e);
                    }
                    persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                }
            }
        } catch (Throwable t) {
            // For SharedTokenCacheCredential, report "unavailable" so chained credentials
            // can fall through; otherwise fail authentication outright.
            String message = "Shared token cache is unavailable in this environment.";
            if (sharedTokenCacheCredential) {
                throw logger.logExceptionAsError(new CredentialUnavailableException(message, t));
            } else {
                throw logger.logExceptionAsError(new ClientAuthenticationException(message, null, t));
            }
        }
    }
    return publicClientApplicationBuilder.build();
}
/**
 * Asynchronously acquire a token using the sign-in state cached by the Azure Toolkit for
 * IntelliJ plugin. Two cached auth methods are supported: "SP" (service principal details
 * stored in a credentials file) and "DC" (a device-code refresh token).
 *
 * @param request the details of the token request
 * @return a Publisher that emits a MsalToken
 */
public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) {
    try {
        IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath());
        IntelliJAuthMethodDetails authDetails = cacheAccessor.getAuthDetailsIfAvailable();
        String authType = authDetails.getAuthMethod();
        if (authType.equalsIgnoreCase("SP")) {
            // Service principal: rebuild a confidential client from the cached details.
            Map<String, String> spDetails = cacheAccessor
                .getIntellijServicePrincipalDetails(authDetails.getCredFilePath());
            String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant");
            try {
                ConfidentialClientApplication.Builder applicationBuilder =
                    ConfidentialClientApplication.builder(spDetails.get("client"),
                        ClientCredentialFactory.createFromSecret(spDetails.get("key")))
                        .authority(authorityUrl);
                if (httpPipelineAdapter != null) {
                    applicationBuilder.httpClient(httpPipelineAdapter);
                } else if (options.getProxyOptions() != null) {
                    applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
                }
                if (options.getExecutorService() != null) {
                    applicationBuilder.executorService(options.getExecutorService());
                }
                ConfidentialClientApplication application = applicationBuilder.build();
                return Mono.fromFuture(application.acquireToken(
                    ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
                        .build())).map(MsalToken::new);
            } catch (MalformedURLException e) {
                return Mono.error(e);
            }
        } else if (authType.equalsIgnoreCase("DC")) {
            // Device code: redeem the cached refresh token via the public client.
            if (isADFSTenant()) {
                return Mono.error(new CredentialUnavailableException("IntelliJCredential "
                    + "authentication unavailable. ADFS tenant/authorities are not supported."));
            }
            JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials();
            String refreshToken = intelliJCredentials.get("refreshToken").textValue();
            RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder =
                RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken);
            // Forward any caller-supplied claims challenge to MSAL.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
                refreshTokenParametersBuilder.claims(customClaimRequest);
            }
            return publicClientApplicationAccessor.getValue()
                .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build()))
                    .map(MsalToken::new));
        } else {
            // No recognizable cached auth method: user has not signed in via the plugin.
            throw logger.logExceptionAsError(new CredentialUnavailableException(
                "IntelliJ Authentication not available."
                + " Please login with Azure Tools for IntelliJ plugin in the IDE."));
        }
    } catch (IOException e) {
        return Mono.error(e);
    }
}
/**
 * Asynchronously acquire a token from Active Directory with Azure CLI.
 *
 * <p>Shells out to {@code az account get-access-token} from a trusted working directory,
 * parses the CLI's JSON output, and converts the locally-formatted {@code expiresOn}
 * timestamp to a UTC OffsetDateTime.</p>
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) {
    String azCommand = "az account get-access-token --output json --resource ";
    StringBuilder command = new StringBuilder();
    command.append(azCommand);
    // The scopes must map to a single AAD resource; reject malformed values before they
    // reach the shell command line.
    String scopes = ScopeUtil.scopesToResource(request.getScopes());
    try {
        ScopeUtil.validateScope(scopes);
    } catch (IllegalArgumentException ex) {
        return Mono.error(logger.logExceptionAsError(ex));
    }
    command.append(scopes);
    AccessToken token = null;
    BufferedReader reader = null;
    try {
        // Run the command through the platform shell (cmd /c or /bin/sh -c).
        String starter;
        String switcher;
        if (isWindowsPlatform()) {
            starter = WINDOWS_STARTER;
            switcher = WINDOWS_SWITCHER;
        } else {
            starter = LINUX_MAC_STARTER;
            switcher = LINUX_MAC_SWITCHER;
        }
        ProcessBuilder builder = new ProcessBuilder(starter, switcher, command.toString());
        String workingDirectory = getSafeWorkingDirectory();
        if (workingDirectory != null) {
            builder.directory(new File(workingDirectory));
        } else {
            throw logger.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
                + " found to execute CLI command from."));
        }
        builder.redirectErrorStream(true);
        Process process = builder.start();
        reader = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"));
        String line;
        StringBuilder output = new StringBuilder();
        while (true) {
            line = reader.readLine();
            if (line == null) {
                break;
            }
            // Shell error text indicating `az` itself is missing => credential unavailable.
            if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE) || line.matches(LINUX_MAC_PROCESS_ERROR_MESSAGE)) {
                throw logger.logExceptionAsError(
                    new CredentialUnavailableException(
                        "AzureCliCredential authentication unavailable. Azure CLI not installed"));
            }
            output.append(line);
        }
        String processOutput = output.toString();
        // NOTE(review): waitFor's boolean result is ignored; if the CLI is still running
        // after 10s, exitValue() below throws IllegalThreadStateException — confirm intended.
        process.waitFor(10, TimeUnit.SECONDS);
        if (process.exitValue() != 0) {
            if (processOutput.length() > 0) {
                // Never propagate the raw access token in an error message.
                String redactedOutput = redactInfo("\"accessToken\": \"(.*?)(\"|$)", processOutput);
                if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "AzureCliCredential authentication unavailable."
                                + " Please run 'az login' to set up account"));
                }
                throw logger.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
            } else {
                throw logger.logExceptionAsError(
                    new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
            }
        }
        Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
            SerializerEncoding.JSON);
        String accessToken = objectMap.get("accessToken");
        String time = objectMap.get("expiresOn");
        // The CLI emits a local "yyyy-MM-dd HH:mm:ss.ffffff" timestamp; drop the fractional
        // seconds, make it ISO by joining with 'T', then convert local time to UTC.
        String timeToSecond = time.substring(0, time.indexOf("."));
        String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
        OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
                                       .atZone(ZoneId.systemDefault())
                                       .toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
        token = new AccessToken(accessToken, expiresOn);
    } catch (IOException | InterruptedException e) {
        throw logger.logExceptionAsError(new IllegalStateException(e));
    } catch (RuntimeException e) {
        return Mono.error(logger.logExceptionAsError(e));
    } finally {
        try {
            if (reader != null) {
                reader.close();
            }
        } catch (IOException ex) {
            return Mono.error(logger.logExceptionAsError(new IllegalStateException(ex)));
        }
    }
    return Mono.just(token);
}
/**
 * Asynchronously acquire a token from Active Directory using the confidential client
 * (client secret or certificate) credentials configured on this client.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> Mono.fromFuture(() -> {
            ClientCredentialParameters parameters =
                ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build();
            return confidentialClient.acquireToken(parameters);
        }).map(MsalToken::new));
}
/**
 * Wraps the given {@code HttpClient} in a pipeline with the standard retry and logging
 * policies so MSAL traffic behaves like other SDK traffic.
 *
 * @param httpClient the client to drive the pipeline
 * @return the assembled pipeline
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    List<HttpPipelinePolicy> policies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    policies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    policies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    return new HttpPipelineBuilder()
        .httpClient(httpClient)
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
/**
 * Asynchronously acquire a token from Active Directory with a username and a password
 * (resource owner password credential flow).
 *
 * @param request the details of the token request
 * @param username the username of the user
 * @param password the password of the user
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request,
                                                        String username, String password) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> {
            UserNamePasswordParameters.UserNamePasswordParametersBuilder parametersBuilder =
                UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()),
                    username, password.toCharArray());
            // Forward any caller-supplied claims challenge to MSAL.
            if (request.getClaims() != null) {
                parametersBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
            }
            return pc.acquireToken(parametersBuilder.build());
        }))
        .onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and "
            + "password", null, t))
        .map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from the currently logged in client.
 *
 * <p>NOTE(review): no method follows this Javadoc — it appears to document the
 * {@code authenticateWithPublicClientCache(TokenRequestContext, IAccount)} overload used
 * below (see authenticateWithSharedTokenCache), which is declared elsewhere in this class.
 * Confirm and relocate next to that method.</p>
 *
 * @param request the details of the token request
 * @param account the account used to login to acquire the last token
 * @return a Publisher that emits an AccessToken
 */
/**
 * Asynchronously acquire a previously cached token for the confidential client without
 * going to the wire. Completes empty when no cached token exists or when the cached token
 * is within the refresh offset of expiring, so callers fall through to a fresh acquisition.
 *
 * @param request the details of the token request
 * @return a Publisher that emits a still-fresh AccessToken, or completes empty
 */
public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> Mono.fromFuture(() -> {
            SilentParameters.SilentParametersBuilder silentBuilder = SilentParameters.builder(
                new HashSet<>(request.getScopes()));
            try {
                return confidentialClient.acquireTokenSilently(silentBuilder.build());
            } catch (MalformedURLException e) {
                return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
            }
        }).map(result -> (AccessToken) new MsalToken(result))
            // Tokens expiring within REFRESH_OFFSET are filtered out to force early refresh.
            .filter(token -> OffsetDateTime.now().isBefore(token.getExpiresAt().minus(REFRESH_OFFSET))));
}
/**
 * Asynchronously acquire a token from Active Directory with a device code challenge.
 * Active Directory provides a device code for login and the user must meet the challenge by
 * authenticating in a browser on the current or a different device.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return a Publisher that emits an AccessToken when the device challenge is met, or an
 * exception if the device code expires
 */
public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request,
                                                  Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    return publicClientApplicationAccessor.getValue().flatMap(pc ->
        Mono.fromFuture(() -> {
            // Relay MSAL's device-code challenge to the caller via DeviceCodeInfo.
            DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder flowBuilder =
                DeviceCodeFlowParameters.builder(
                    new HashSet<>(request.getScopes()), challenge -> deviceCodeConsumer.accept(
                        new DeviceCodeInfo(challenge.userCode(), challenge.deviceCode(),
                            challenge.verificationUri(),
                            OffsetDateTime.now().plusSeconds(challenge.expiresIn()),
                            challenge.message())));
            if (request.getClaims() != null) {
                flowBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
            }
            return pc.acquireToken(flowBuilder.build());
        }).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code", null, t))
            .map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with the Visual Studio Code cached
 * refresh token.
 *
 * @param request the details of the token request
 * @param cloud the Azure cloud name whose cached VS Code credentials should be used
 * @return a Publisher that emits an AccessToken.
 */
public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) {
    // ADFS authorities are not supported by the VS Code refresh-token flow.
    if (isADFSTenant()) {
        return Mono.error(new CredentialUnavailableException("VsCodeCredential "
            + "authentication unavailable. ADFS tenant/authorities are not supported."));
    }
    VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor();
    String cachedRefreshToken = accessor.getCredentials("VS Code Azure", cloud);
    RefreshTokenParameters.RefreshTokenParametersBuilder refreshBuilder = RefreshTokenParameters
        .builder(new HashSet<>(request.getScopes()), cachedRefreshToken);
    if (request.getClaims() != null) {
        refreshBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
    }
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshBuilder.build())).map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with an authorization code obtained
 * from an OAuth 2.0 authorization code flow.
 *
 * @param request the details of the token request
 * @param authorizationCode the oauth2 authorization code
 * @param redirectUrl the redirectUrl where the authorization code is sent to
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode,
                                                         URI redirectUrl) {
    AuthorizationCodeParameters.AuthorizationCodeParametersBuilder codeParametersBuilder =
        AuthorizationCodeParameters.builder(authorizationCode, redirectUrl)
            .scopes(new HashSet<>(request.getScopes()));
    if (request.getClaims() != null) {
        codeParametersBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
    }
    // Redeem the code through the confidential client when a secret is configured,
    // otherwise through the public client.
    Mono<IAuthenticationResult> acquireToken = (clientSecret != null)
        ? confidentialClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(codeParametersBuilder.build())))
        : publicClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(codeParametersBuilder.build())));
    return acquireToken
        .onErrorMap(t -> new ClientAuthenticationException(
            "Failed to acquire token with authorization code", null, t))
        .map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from Active Directory by opening a browser and waiting for
 * the user to log in. The credential runs a minimal local HTTP server at the given port, so
 * {@code http://localhost:{port}} must be listed as a valid reply URL for the application.
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening, or {@code null} to fall back
 * to {@code redirectUrl} / plain localhost
 * @param redirectUrl the redirect URL to use when no port is given
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
                                                          String redirectUrl) {
    // Redirect precedence: explicit port, then explicit redirect URL, then bare localhost.
    String redirect;
    if (port != null) {
        redirect = HTTP_LOCALHOST + ":" + port;
    } else if (redirectUrl != null) {
        redirect = redirectUrl;
    } else {
        redirect = HTTP_LOCALHOST;
    }
    URI redirectUri;
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        return Mono.error(logger.logExceptionAsError(new RuntimeException(e)));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder interactiveBuilder =
        InteractiveRequestParameters.builder(redirectUri).scopes(new HashSet<>(request.getScopes()));
    if (request.getClaims() != null) {
        interactiveBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
    }
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(interactiveBuilder.build())))
        .onErrorMap(t -> new ClientAuthenticationException(
            "Failed to acquire token with Interactive Browser Authentication.", null, t))
        .map(MsalToken::new);
}
/**
 * Gets a token for the requested scopes from the shared (persisted) token cache.
 *
 * <p>All "authentication unavailable" outcomes (empty cache, no matching account, ambiguous
 * matches) are surfaced as {@link CredentialUnavailableException} — a RuntimeException
 * subtype, so this narrowing is backward compatible — which lets chained credentials fall
 * through to the next credential. Previously only the empty-cache branch used that type.</p>
 *
 * @param request the details of the token request
 * @param username the expected account username, or {@code null} to accept a single cached account
 * @return a Publisher that emits a MsalToken
 */
public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts())
            .onErrorMap(t -> new CredentialUnavailableException(
                "Cannot get accounts from token cache. Error: " + t.getMessage(), t))
            .flatMap(set -> {
                IAccount requestedAccount;
                // De-duplicate by home account id, keeping the first match per account.
                Map<String, IAccount> accounts = new HashMap<>();
                if (set.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException("SharedTokenCacheCredential "
                        + "authentication unavailable. No accounts were found in the cache."));
                }
                for (IAccount cached : set) {
                    if (username == null || username.equals(cached.username())) {
                        if (!accounts.containsKey(cached.homeAccountId())) {
                            accounts.put(cached.homeAccountId(), cached);
                        }
                    }
                }
                if (accounts.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException(String.format(
                        "SharedTokenCacheCredential "
                        + "authentication unavailable. No account matching the specified username: %s was "
                        + "found in the cache.", username)));
                } else if (accounts.size() > 1) {
                    if (username == null) {
                        return Mono.error(new CredentialUnavailableException(
                            "SharedTokenCacheCredential authentication "
                            + "unavailable. Multiple accounts were found in the cache. Use username and "
                            + "tenant id to disambiguate."));
                    } else {
                        return Mono.error(new CredentialUnavailableException(String.format(
                            "SharedTokenCacheCredential "
                            + "authentication unavailable. Multiple accounts matching the specified username: "
                            + "%s were found in the cache.", username)));
                    }
                } else {
                    requestedAccount = accounts.values().iterator().next();
                }
                // Silent acquisition against the single unambiguous account.
                return authenticateWithPublicClientCache(request, requestedAccount);
            }));
}
/**
 * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint.
 *
 * <p>Arc uses a challenge handshake: the first, unauthenticated GET is expected to fail with
 * 401 and carry a {@code WWW-Authenticate} header whose value (after '=') is the path of a
 * local file containing the secret; a second GET then presents that secret via
 * {@code Authorization: Basic}.</p>
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint,
                                                                  TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode("2019-11-01", "UTF-8"));
        URL url = new URL(String.format("%s?%s", identityEndpoint, payload));
        String secretKey = null;
        try {
            // Challenge request: expected to throw on the 401 and land in the catch below.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            // NOTE(review): this Scanner's result is discarded and it is never closed —
            // presumably only forcing the response to be read; confirm intent.
            new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
        } catch (IOException e) {
            if (connection == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Failed to initialize "
                                                                                       + "Http URL connection to the endpoint.",
                    null, e));
            }
            // Anything other than the expected 401 challenge is a hard failure.
            int status = connection.getResponseCode();
            if (status != 401) {
                throw logger.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401"
                    + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status),
                    null, e));
            }
            String realm = connection.getHeaderField("WWW-Authenticate");
            if (realm == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            // Header format is "<scheme> <key>=<path-to-secret-file>".
            int separatorIndex = realm.indexOf("=");
            if (separatorIndex == -1) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            String secretKeyPath = realm.substring(separatorIndex + 1);
            secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        if (secretKey == null) {
            throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value"
                + " in the response from Azure Arc Managed Identity Endpoint",
                null));
        }
        try {
            // Authenticated request: present the file-sourced secret as Basic authorization.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey));
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner scanner = new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
            String result = scanner.hasNext() ? scanner.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity
 * endpoint.
 *
 * <p>The endpoint serves a self-signed TLS certificate, so trust is established by pinning
 * the expected certificate thumbprint instead of ordinary CA validation.</p>
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header (secret) to acquire token with
 * @param thumbprint the expected thumbprint of the endpoint's TLS certificate
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint,
                                                                            String identityHeader,
                                                                            String thumbprint,
                                                                            TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpsURLConnection connection = null;
        String endpoint = identityEndpoint;
        String headerValue = identityHeader;
        String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION;
        // Build the query string: resource, API version, and optionally the client id
        // for user-assigned identities.
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            payload.append("&client_id=");
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpsURLConnection) url.openConnection();
            // Pin the endpoint's self-signed certificate by thumbprint.
            IdentitySslUtil.addTrustedCertificateThumbprint(getClass().getSimpleName(), connection,
                thumbprint);
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                connection.setRequestProperty("Secret", headerValue);
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            // Read the entire JSON response body.
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                            .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the App Service Managed Service Identity endpoint.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param msiEndpoint the MSI endpoint to acquire token from
 * @param msiSecret the msi secret to acquire token with
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader,
                                                               String msiEndpoint, String msiSecret,
                                                               TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        // Prefer the newer IDENTITY_ENDPOINT contract; fall back to the legacy MSI_ENDPOINT one.
        boolean useIdentityEndpoint = identityEndpoint != null;
        String endpoint = useIdentityEndpoint ? identityEndpoint : msiEndpoint;
        String headerValue = useIdentityEndpoint ? identityHeader : msiSecret;
        String endpointVersion = useIdentityEndpoint ? IDENTITY_ENDPOINT_VERSION : MSI_ENDPOINT_VERSION;
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        StringBuilder payload = new StringBuilder();
        payload.append("resource=").append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=").append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            // The two protocol versions spell the client-id query parameter differently.
            payload.append(useIdentityEndpoint ? "&client_id=" : "&clientid=")
                .append(URLEncoder.encode(clientId, "UTF-8"));
        }
        HttpURLConnection connection = null;
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                // Likewise, the secret header name differs between the two protocol versions.
                connection.setRequestProperty(
                    useIdentityEndpoint ? "X-IDENTITY-HEADER" : "Secret", headerValue);
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Virtual Machine IMDS endpoint.
 * Availability is probed first via checkIMDSAvailable(); the token request is then retried
 * up to options.getMaxRetry() times on transient HTTP failures (410/429/404/5xx).
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) {
String resource = ScopeUtil.scopesToResource(request.getScopes());
StringBuilder payload = new StringBuilder();
// Per IMDS docs a 410 means the endpoint is upgrading; wait at least 70s before retrying.
final int imdsUpgradeTimeInMs = 70 * 1000;
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
payload.append("&resource=");
payload.append(URLEncoder.encode(resource, "UTF-8"));
if (clientId != null) {
payload.append("&client_id=");
payload.append(URLEncoder.encode(clientId, "UTF-8"));
}
} catch (IOException exception) {
return Mono.error(exception);
}
return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> {
int retry = 1;
while (retry <= options.getMaxRetry()) {
URL url = null;
HttpURLConnection connection = null;
try {
// NOTE(review): the IMDS URL string literal appears truncated in this copy of the file
// (should be the well-known link-local IMDS token endpoint) — verify against upstream.
url =
new URL(String.format("http:
payload.toString()));
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setRequestProperty("Metadata", "true");
connection.connect();
// Read the entire response body in one token ("\\A" = beginning of input).
Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
.useDelimiter("\\A");
String result = s.hasNext() ? s.next() : "";
return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
} catch (IOException exception) {
// No connection object at all means the URL itself could not be opened.
if (connection == null) {
throw logger.logExceptionAsError(new RuntimeException(
String.format("Could not connect to the url: %s.", url), exception));
}
int responseCode;
try {
responseCode = connection.getResponseCode();
} catch (Exception e) {
// Cannot even read a status code: treat IMDS as unavailable so callers can fall back.
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
}
if (responseCode == 400) {
// 400 from IMDS indicates a bad/unsupported identity configuration, not a transient error.
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established.", null));
}
if (responseCode == 410
|| responseCode == 429
|| responseCode == 404
|| (responseCode >= 500 && responseCode <= 599)) {
// NOTE(review): getNano() returns only the sub-second part in nanoseconds, and /1000
// yields microseconds, not milliseconds — the computed backoff looks wrong; confirm
// intended units against the retryTimeout contract.
int retryTimeoutInMs = options.getRetryTimeout()
.apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000;
retryTimeoutInMs =
(responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? imdsUpgradeTimeInMs
: retryTimeoutInMs;
retry++;
if (retry > options.getMaxRetry()) {
break;
} else {
sleep(retryTimeoutInMs);
}
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Couldn't acquire access token from IMDS, verify your objectId, "
+ "clientId or msiResourceId", exception));
}
} finally {
if (connection != null) {
connection.disconnect();
}
}
}
throw logger.logExceptionAsError(new RuntimeException(
String.format("MSI: Failed to acquire tokens after retrying %s times",
options.getMaxRetry())));
}));
}
/**
 * Probes the IMDS endpoint with a short (500 ms) connect timeout to decide whether
 * managed-identity authentication is possible on this host.
 * Emits {@code true} on success; signals {@link CredentialUnavailableException} otherwise.
 */
private Mono<Boolean> checkIMDSAvailable() {
StringBuilder payload = new StringBuilder();
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
} catch (IOException exception) {
return Mono.error(exception);
}
return Mono.fromCallable(() -> {
HttpURLConnection connection = null;
// NOTE(review): the IMDS URL string literal appears truncated in this copy of the file —
// verify against upstream before relying on this probe.
URL url = new URL(String.format("http:
payload.toString()));
try {
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
// A short timeout keeps non-Azure hosts from hanging during credential-chain probing.
connection.setConnectTimeout(500);
connection.connect();
} catch (Exception e) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
} finally {
if (connection != null) {
connection.disconnect();
}
}
return true;
});
}
/**
 * Blocks the current thread for the given number of milliseconds.
 * Restores the thread's interrupt status before rethrowing, so callers further up
 * the stack can still observe the interruption.
 *
 * @param millis how long to sleep, in milliseconds
 * @throws IllegalStateException if the sleep is interrupted (with the cause attached)
 */
private static void sleep(int millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException ex) {
        // Re-assert the interrupt flag: swallowing it would hide the interruption from callers.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(ex);
    }
}
/**
 * Maps the SDK's {@link ProxyOptions} onto a {@link Proxy} for MSAL's networking layer.
 * SOCKS4/SOCKS5 become a SOCKS proxy; HTTP and any other type become an HTTP proxy.
 */
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    switch (options.getType()) {
        case SOCKS4:
        case SOCKS5:
            return new Proxy(Type.SOCKS, options.getAddress());
        default:
            // HTTP and any future types fall back to an HTTP proxy.
            return new Proxy(Type.HTTP, options.getAddress());
    }
}
/**
 * Returns a trusted directory from which to launch external CLI processes,
 * or {@code null} when no safe directory can be determined (Windows without %SystemRoot%).
 */
private String getSafeWorkingDirectory() {
    if (!isWindowsPlatform()) {
        return DEFAULT_MAC_LINUX_PATH;
    }
    // On Windows, only system32 under a known SystemRoot is considered safe.
    return CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)
        ? null
        : DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32";
}
/** Returns {@code true} when the JVM reports a Windows operating system. */
private boolean isWindowsPlatform() {
    String osName = System.getProperty("os.name");
    return osName.contains("Windows");
}
/**
 * Masks every match of {@code regex} in {@code input} with a fixed placeholder,
 * so sensitive values (e.g. access tokens) never appear in logs or error messages.
 */
private String redactInfo(String regex, String input) {
    final String mask = "****";
    return input.replaceAll(regex, mask);
}
/**
 * Opens {@code url} in the operating system's default browser using the
 * platform-specific launcher. Logs an error when the platform is unrecognized.
 *
 * @param url the URL to open
 * @throws IOException if the launcher process cannot be started
 */
void openUrl(String url) throws IOException {
    Runtime rt = Runtime.getRuntime();
    String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
    // Use the String[] form of exec so the URL is passed as a single argument and is
    // never tokenized on whitespace (the single-String overload splits on spaces).
    if (os.contains("win")) {
        rt.exec(new String[]{"rundll32", "url.dll,FileProtocolHandler", url});
    } else if (os.contains("mac")) {
        rt.exec(new String[]{"open", url});
    } else if (os.contains("nix") || os.contains("nux")) {
        rt.exec(new String[]{"xdg-open", url});
    } else {
        logger.error("Browser could not be opened - please open {} in a browser on this device.", url);
    }
}
/** Wraps {@code e} in an already-failed future, for returning from MSAL supplier callbacks. */
private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) {
    CompletableFuture<IAuthenticationResult> failed = new CompletableFuture<>();
    failed.completeExceptionally(e);
    return failed;
}
/**
 * Chooses the HTTP transport handed to MSAL, in precedence order:
 * explicit pipeline, explicit HttpClient, then a default client.
 * When only proxy options are configured, no adapter is created on purpose —
 * MSAL's own java.net.Proxy support is used instead.
 */
private void initializeHttpPipelineAdapter() {
    HttpPipeline httpPipeline = options.getHttpPipeline();
    if (httpPipeline != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline);
        return;
    }
    HttpClient httpClient = options.getHttpClient();
    if (httpClient != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient));
        return;
    }
    if (options.getProxyOptions() == null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()));
    }
}
/**
 * Get the configured tenant id.
 *
 * @return the tenant id.
 */
public String getTenantId() {
    return this.tenantId;
}
/**
 * Get the configured client id.
 *
 * @return the client id.
 */
public String getClientId() {
    return this.clientId;
}
/** Returns {@code true} when the configured tenant is the special ADFS tenant. */
private boolean isADFSTenant() {
    // tenantId is never null: the constructor defaults it to "organizations".
    return ADFS_TENANT.equals(this.tenantId);
}
/**
 * Loads the client certificate as raw bytes: from {@code certificatePath} when set,
 * otherwise by draining the {@code certificate} stream; empty when neither is configured.
 *
 * @return the certificate bytes, possibly empty
 * @throws IOException if the file or stream cannot be read
 */
private byte[] getCertificateBytes() throws IOException {
    if (certificatePath != null) {
        return Files.readAllBytes(Paths.get(certificatePath));
    }
    if (certificate == null) {
        return new byte[0];
    }
    // Drain the configured InputStream in 1 KiB chunks.
    ByteArrayOutputStream buffered = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    int count = certificate.read(chunk, 0, chunk.length);
    while (count != -1) {
        buffered.write(chunk, 0, count);
        count = certificate.read(chunk, 0, chunk.length);
    }
    return buffered.toByteArray();
}
/**
 * Opens the client certificate as a stream: a fresh FileInputStream when a path is set,
 * otherwise the configured stream itself; {@code null} when neither source exists.
 *
 * @throws IOException if the certificate file cannot be opened
 */
private InputStream getCertificateInputStream() throws IOException {
    if (certificatePath != null) {
        return new FileInputStream(certificatePath);
    }
    // May be null when no certificate was configured at all.
    return certificate;
}
} |
Isn't this already handled above (line 589)? Or am I misreading the control scope?
return publicClientApplicationAccessor.getValue()
.flatMap(pc -> Mono.fromFuture(() -> {
SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
new HashSet<>(request.getScopes()));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
parametersBuilder.forceRefresh(true);
}
if (account != null) {
parametersBuilder = parametersBuilder.account(account);
}
try {
return pc.acquireTokenSilently(parametersBuilder.build());
} catch (MalformedURLException e) {
return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
}
}).map(MsalToken::new)
.filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))
.switchIfEmpty(Mono.fromFuture(() -> {
SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
new HashSet<>(request.getScopes())).forceRefresh(true);
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
forceParametersBuilder.claims(customClaimRequest);
}
if (account != null) {
forceParametersBuilder = forceParametersBuilder.account(account);
}
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
forceParametersBuilder.claims(customClaimRequest);
}
try {
return pc.acquireTokenSilently(forceParametersBuilder.build());
} catch (MalformedURLException e) {
return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
}
}).map(MsalToken::new)));
} | if (request.getClaims() != null) { | public Mono<MsalToken> authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) {
return publicClientApplicationAccessor.getValue()
.flatMap(pc -> Mono.fromFuture(() -> {
SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
new HashSet<>(request.getScopes()));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
parametersBuilder.forceRefresh(true);
}
if (account != null) {
parametersBuilder = parametersBuilder.account(account);
}
try {
return pc.acquireTokenSilently(parametersBuilder.build());
} catch (MalformedURLException e) {
return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
}
}).map(MsalToken::new)
.filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))
.switchIfEmpty(Mono.fromFuture(() -> {
SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder(
new HashSet<>(request.getScopes())).forceRefresh(true);
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
forceParametersBuilder.claims(customClaimRequest);
}
if (account != null) {
forceParametersBuilder = forceParametersBuilder.account(account);
}
try {
return pc.acquireTokenSilently(forceParametersBuilder.build());
} catch (MalformedURLException e) {
return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
}
}).map(MsalToken::new)));
} | class IdentityClient {
private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
private static final Random RANDOM = new Random();
private static final String WINDOWS_STARTER = "cmd.exe";
private static final String LINUX_MAC_STARTER = "/bin/sh";
private static final String WINDOWS_SWITCHER = "/c";
private static final String LINUX_MAC_SWITCHER = "-c";
private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
private static final String LINUX_MAC_PROCESS_ERROR_MESSAGE = "(.*)az:(.*)not found";
private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
private static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final String DEFAULT_PUBLIC_CACHE_FILE_NAME = "msal.cache";
private static final String DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME = "msal.confidential.cache";
private static final Path DEFAULT_CACHE_FILE_PATH = Platform.isWindows()
? Paths.get(System.getProperty("user.home"), "AppData", "Local", ".IdentityService")
: Paths.get(System.getProperty("user.home"), ".IdentityService");
private static final String DEFAULT_KEYCHAIN_SERVICE = "Microsoft.Developer.IdentityService";
private static final String DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT = "MSALCache";
private static final String DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT = "MSALConfidentialCache";
private static final String DEFAULT_KEYRING_NAME = "default";
private static final String DEFAULT_KEYRING_SCHEMA = "msal.cache";
private static final String DEFAULT_PUBLIC_KEYRING_ITEM_NAME = DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME = DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_KEYRING_ATTR_NAME = "MsalClientID";
private static final String DEFAULT_KEYRING_ATTR_VALUE = "Microsoft.Developer.IdentityService";
private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
private static final String MSI_ENDPOINT_VERSION = "2017-09-01";
private static final String ADFS_TENANT = "adfs";
private static final String HTTP_LOCALHOST = "http:
private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
private final ClientLogger logger = new ClientLogger(IdentityClient.class);
private final IdentityClientOptions options;
private final String tenantId;
private final String clientId;
private final String clientSecret;
private final InputStream certificate;
private final String certificatePath;
private final String certificatePassword;
private HttpPipelineAdapter httpPipelineAdapter;
private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
/**
 * Creates an IdentityClient with the given options.
 *
 * @param tenantId the tenant ID of the application; defaults to "organizations" when null.
 * @param clientId the client ID of the application.
 * @param clientSecret the client secret of the application.
 * @param certificatePath the path to the PKCS12 or PEM certificate of the application.
 * @param certificate the PKCS12 or PEM certificate of the application.
 * @param certificatePassword the password protecting the PFX certificate.
 * @param isSharedTokenCacheCredential Indicate whether the credential is
 * {@link com.azure.identity.SharedTokenCacheCredential} or not.
 * @param options the options configuring the client; defaults to new IdentityClientOptions when null.
 */
IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath,
               InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
               IdentityClientOptions options) {
    this.tenantId = tenantId == null ? "organizations" : tenantId;
    this.options = options == null ? new IdentityClientOptions() : options;
    this.clientId = clientId;
    this.clientSecret = clientSecret;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    // MSAL application objects are expensive; build them lazily and at most once.
    this.publicClientApplicationAccessor = new SynchronizedAccessor<>(
        () -> getPublicClientApplication(isSharedTokenCacheCredential));
    this.confidentialClientApplicationAccessor = new SynchronizedAccessor<>(
        () -> getConfidentialClientApplication());
}
/**
 * Builds the MSAL ConfidentialClientApplication from the configured secret or certificate.
 * Chooses a credential source in order: client secret, then PEM (no password) or PFX
 * (with password) certificate. Optionally wires the persistent shared token cache.
 *
 * @throws IllegalArgumentException when no client id, or neither secret nor certificate, is set
 */
private ConfidentialClientApplication getConfidentialClientApplication() {
if (clientId == null) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
// Strip trailing slashes from the authority host before appending the tenant.
String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
// No password => PEM: extract the key and one-or-more certificates.
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
// Password present => PKCS#12/PFX container.
InputStream pfxCertificateStream = getCertificateInputStream();
credential = ClientCredentialFactory.createFromCertificate(
pfxCertificateStream, certificatePassword);
}
} catch (IOException | GeneralSecurityException e) {
throw logger.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path"));
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw logger.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
if (options.isSharedTokenCacheEnabled()) {
try {
PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
if (Platform.isMac()) {
// NOTE(review): on macOS the keychain is configured but setTokenCacheAccessAspect is
// only ever called inside the Linux branch below — confirm the Mac (and Windows) paths
// are intentionally left without a cache aspect here.
persistenceSettingsBuilder.setMacKeychain(
DEFAULT_KEYCHAIN_SERVICE, DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT);
}
if (Platform.isLinux()) {
try {
persistenceSettingsBuilder
.setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME,
DEFAULT_KEYRING_ATTR_VALUE, null, null);
applicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
} catch (KeyRingAccessException e) {
// Fall back to an unencrypted file cache only when the caller opted in.
if (!options.getAllowUnencryptedCache()) {
throw logger.logExceptionAsError(e);
}
persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
applicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
}
}
} catch (Throwable t) {
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
return applicationBuilder.build();
}
/**
 * Builds the MSAL PublicClientApplication for user-interactive / cached-token flows,
 * advertising the CP1 client capability and optionally wiring the persistent shared cache.
 *
 * @param sharedTokenCacheCredential whether the caller is SharedTokenCacheCredential; controls
 *        which exception type is thrown when the persistent cache is unavailable
 * @throws IllegalArgumentException when no client id is configured
 */
private PublicClientApplication getPublicClientApplication(boolean sharedTokenCacheCredential) {
if (clientId == null) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
// Strip trailing slashes from the authority host before appending the tenant.
String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId);
try {
publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw logger.logExceptionAsWarning(new IllegalStateException(e));
}
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
publicClientApplicationBuilder.httpClient(httpPipelineAdapter);
} else {
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
publicClientApplicationBuilder.executorService(options.getExecutorService());
}
// "CP1" signals the client can handle claims challenges (CAE).
Set<String> set = new HashSet<>(1);
set.add("CP1");
publicClientApplicationBuilder.clientCapabilities(set);
if (options.isSharedTokenCacheEnabled()) {
try {
PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
DEFAULT_PUBLIC_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
if (Platform.isWindows()) {
publicClientApplicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
} else if (Platform.isMac()) {
persistenceSettingsBuilder.setMacKeychain(
DEFAULT_KEYCHAIN_SERVICE, DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT);
publicClientApplicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
} else if (Platform.isLinux()) {
try {
persistenceSettingsBuilder
.setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
DEFAULT_PUBLIC_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME, DEFAULT_KEYRING_ATTR_VALUE,
null, null);
publicClientApplicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
} catch (KeyRingAccessException e) {
// Fall back to an unencrypted file cache only when the caller opted in.
if (!options.getAllowUnencryptedCache()) {
throw logger.logExceptionAsError(e);
}
persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
publicClientApplicationBuilder.setTokenCacheAccessAspect(
new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
}
}
} catch (Throwable t) {
String message = "Shared token cache is unavailable in this environment.";
if (sharedTokenCacheCredential) {
throw logger.logExceptionAsError(new CredentialUnavailableException(message, t));
} else {
throw logger.logExceptionAsError(new ClientAuthenticationException(message, null, t));
}
}
}
return publicClientApplicationBuilder.build();
}
/**
 * Asynchronously acquire a token using credentials cached by the Azure Toolkit for IntelliJ.
 * Supports two cached auth methods: "SP" (service principal details stored in a credential
 * file) and "DC" (device-code refresh token); anything else is reported as unavailable.
 *
 * @param request the details of the token request
 * @return a Publisher that emits a MsalToken
 */
public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) {
try {
IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath());
IntelliJAuthMethodDetails authDetails = cacheAccessor.getAuthDetailsIfAvailable();
String authType = authDetails.getAuthMethod();
if (authType.equalsIgnoreCase("SP")) {
// Service-principal flow: rebuild a confidential client from the cached SP details.
Map<String, String> spDetails = cacheAccessor
.getIntellijServicePrincipalDetails(authDetails.getCredFilePath());
String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant");
try {
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(spDetails.get("client"),
ClientCredentialFactory.createFromSecret(spDetails.get("key")))
.authority(authorityUrl);
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else if (options.getProxyOptions() != null) {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
ConfidentialClientApplication application = applicationBuilder.build();
return Mono.fromFuture(application.acquireToken(
ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
.build())).map(MsalToken::new);
} catch (MalformedURLException e) {
return Mono.error(e);
}
} else if (authType.equalsIgnoreCase("DC")) {
// Device-code flow: redeem the cached refresh token. Not possible against ADFS.
if (isADFSTenant()) {
return Mono.error(new CredentialUnavailableException("IntelliJCredential "
+ "authentication unavailable. ADFS tenant/authorities are not supported."));
}
JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials();
String refreshToken = intelliJCredentials.get("refreshToken").textValue();
RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder =
RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken);
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
refreshTokenParametersBuilder.claims(customClaimRequest);
}
return publicClientApplicationAccessor.getValue()
.flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build()))
.map(MsalToken::new));
} else {
throw logger.logExceptionAsError(new CredentialUnavailableException(
"IntelliJ Authentication not available."
+ " Please login with Azure Tools for IntelliJ plugin in the IDE."));
}
} catch (IOException e) {
return Mono.error(e);
}
}
/**
 * Asynchronously acquire a token from Active Directory with Azure CLI.
 * Shells out to {@code az account get-access-token} from a trusted working directory and
 * parses the JSON it prints; maps missing-CLI / not-logged-in output to
 * {@link CredentialUnavailableException} so credential chains can fall through.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) {
String azCommand = "az account get-access-token --output json --resource ";
StringBuilder command = new StringBuilder();
command.append(azCommand);
String scopes = ScopeUtil.scopesToResource(request.getScopes());
try {
// Validate before appending: guards the shell command against injection via scopes.
ScopeUtil.validateScope(scopes);
} catch (IllegalArgumentException ex) {
return Mono.error(logger.logExceptionAsError(ex));
}
command.append(scopes);
AccessToken token = null;
BufferedReader reader = null;
try {
String starter;
String switcher;
if (isWindowsPlatform()) {
starter = WINDOWS_STARTER;
switcher = WINDOWS_SWITCHER;
} else {
starter = LINUX_MAC_STARTER;
switcher = LINUX_MAC_SWITCHER;
}
ProcessBuilder builder = new ProcessBuilder(starter, switcher, command.toString());
String workingDirectory = getSafeWorkingDirectory();
if (workingDirectory != null) {
builder.directory(new File(workingDirectory));
} else {
throw logger.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
+ " found to execute CLI command from."));
}
// Merge stderr into stdout so error text is visible in the captured output below.
builder.redirectErrorStream(true);
Process process = builder.start();
reader = new BufferedReader(new InputStreamReader(process.getInputStream(), "UTF-8"));
String line;
StringBuilder output = new StringBuilder();
while (true) {
line = reader.readLine();
if (line == null) {
break;
}
// Shell-level "command not found" output means the CLI is not installed.
if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE) || line.matches(LINUX_MAC_PROCESS_ERROR_MESSAGE)) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable. Azure CLI not installed"));
}
output.append(line);
}
String processOutput = output.toString();
// NOTE(review): the boolean result of waitFor is ignored; on timeout, exitValue() below
// would throw IllegalThreadStateException — confirm whether a timeout should be handled
// explicitly.
process.waitFor(10, TimeUnit.SECONDS);
if (process.exitValue() != 0) {
if (processOutput.length() > 0) {
// Never let the raw token text reach logs or exception messages.
String redactedOutput = redactInfo("\"accessToken\": \"(.*?)(\"|$)", processOutput);
if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable."
+ " Please run 'az login' to set up account"));
}
throw logger.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
} else {
throw logger.logExceptionAsError(
new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
}
}
Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
SerializerEncoding.JSON);
String accessToken = objectMap.get("accessToken");
String time = objectMap.get("expiresOn");
// CLI prints a local time like "2020-01-01 01:02:03.000000"; drop fractional seconds and
// convert to an ISO local date-time before interpreting it in the system zone.
// NOTE(review): indexOf(".") would return -1 if the CLI ever omits fractional seconds —
// confirm the output format is stable.
String timeToSecond = time.substring(0, time.indexOf("."));
String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
.atZone(ZoneId.systemDefault())
.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
token = new AccessToken(accessToken, expiresOn);
} catch (IOException | InterruptedException e) {
throw logger.logExceptionAsError(new IllegalStateException(e));
} catch (RuntimeException e) {
return Mono.error(logger.logExceptionAsError(e));
} finally {
try {
if (reader != null) {
reader.close();
}
} catch (IOException ex) {
return Mono.error(logger.logExceptionAsError(new IllegalStateException(ex)));
}
}
return Mono.just(token);
}
/**
 * Asynchronously acquire a token from Active Directory with a client secret.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> {
            // Building the parameters is side-effect free, so it can happen outside the supplier.
            ClientCredentialParameters parameters =
                ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build();
            return Mono.fromFuture(() -> confidentialClient.acquireToken(parameters))
                .map(MsalToken::new);
        });
}
/**
 * Wraps the given HttpClient in a minimal azure-core pipeline: retry plus HTTP logging,
 * with the standard before/after-retry provider policies.
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    List<HttpPipelinePolicy> policies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    policies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    policies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    return new HttpPipelineBuilder()
        .httpClient(httpClient)
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
/**
 * Asynchronously acquire a token from Active Directory with a username and a password.
 *
 * @param request the details of the token request
 * @param username the username of the user
 * @param password the password of the user
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request,
                                                        String username, String password) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> {
            UserNamePasswordParameters.UserNamePasswordParametersBuilder builder =
                UserNamePasswordParameters.builder(
                    new HashSet<>(request.getScopes()), username, password.toCharArray());
            // Forward any claims challenge (e.g. from CAE) to MSAL.
            if (request.getClaims() != null) {
                builder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
            }
            return pc.acquireToken(builder.build());
        }))
        .onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and "
            + "password", null, t))
        .map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from the currently logged in client.
 *
 * <p>Attempts a silent acquisition against the confidential client's cache only; tokens that are
 * already inside the refresh window are filtered out so callers fall back to a fresh acquisition.</p>
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> Mono.fromFuture(() -> {
            SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
                new HashSet<>(request.getScopes()));
            try {
                return confidentialClient.acquireTokenSilently(parametersBuilder.build());
            } catch (MalformedURLException e) {
                // Surface authority URL problems as a failed future so the Mono errors.
                return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
            }
        }).map(ar -> (AccessToken) new MsalToken(ar))
            // Drop cached tokens expiring within REFRESH_OFFSET (5 minutes) so they get refreshed.
            .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET))));
}
/**
 * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide
 * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a
 * different device.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device
 * code expires
 */
public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request,
                                                  Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    return publicClientApplicationAccessor.getValue().flatMap(pc ->
        Mono.fromFuture(() -> {
            // Bridge MSAL's device-code callback into the caller-supplied consumer.
            DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder builder =
                DeviceCodeFlowParameters.builder(
                    new HashSet<>(request.getScopes()),
                    dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(),
                        dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())));
            if (request.getClaims() != null) {
                builder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
            }
            return pc.acquireToken(builder.build());
        })
            .onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code",
                null, t))
            .map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with the Visual Studio Code cached refresh token.
 *
 * @param request the details of the token request
 * @param cloud the Azure cloud name whose cached VS Code credentials should be used
 * @return a Publisher that emits an AccessToken.
 */
public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) {
    if (isADFSTenant()) {
        return Mono.error(new CredentialUnavailableException("VsCodeCredential "
            + "authentication unavailable. ADFS tenant/authorities are not supported."));
    }
    VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor();
    // Refresh token persisted by the VS Code Azure Account extension for the requested cloud.
    String credential = accessor.getCredentials("VS Code Azure", cloud);
    RefreshTokenParameters.RefreshTokenParametersBuilder parametersBuilder = RefreshTokenParameters
        .builder(new HashSet<>(request.getScopes()), credential);
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
    }
    // NOTE(review): acquireToken is invoked eagerly here rather than deferred inside a supplier
    // (Mono.fromFuture(() -> ...)) as the other methods do — confirm this is intentional.
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(pc.acquireToken(parametersBuilder.build())).map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow.
 *
 * @param request the details of the token request
 * @param authorizationCode the oauth2 authorization code
 * @param redirectUrl the redirectUrl where the authorization code is sent to
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode,
                                                         URI redirectUrl) {
    AuthorizationCodeParameters.AuthorizationCodeParametersBuilder builder =
        AuthorizationCodeParameters.builder(authorizationCode, redirectUrl)
            .scopes(new HashSet<>(request.getScopes()));
    if (request.getClaims() != null) {
        builder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
    }
    // A confidential client (client secret configured) and a public client redeem the code
    // through their respective MSAL application objects.
    Mono<IAuthenticationResult> acquireToken = (clientSecret != null)
        ? confidentialClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build())))
        : publicClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build())));
    return acquireToken
        .onErrorMap(t -> new ClientAuthenticationException(
            "Failed to acquire token with authorization code", null, t))
        .map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The
 * credential will run a minimal local HttpServer at the given port, so {@code http://localhost:{port}} must be
 * listed as a valid reply URL for the application.
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening
 * @param redirectUrl the redirect URL to listen on for the authorization response; used when {@code port} is null
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
                                                          String redirectUrl) {
    URI redirectUri;
    String redirect;
    // Precedence: explicit localhost port, then caller-supplied redirect URL, then plain localhost.
    if (port != null) {
        redirect = HTTP_LOCALHOST + ":" + port;
    } else if (redirectUrl != null) {
        redirect = redirectUrl;
    } else {
        redirect = HTTP_LOCALHOST;
    }
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        return Mono.error(logger.logExceptionAsError(new RuntimeException(e)));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
        InteractiveRequestParameters.builder(redirectUri).scopes(new HashSet<>(request.getScopes()));
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        builder.claims(customClaimRequest);
    }
    Mono<IAuthenticationResult> acquireToken = publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build())));
    return acquireToken.onErrorMap(t -> new ClientAuthenticationException(
        "Failed to acquire token with Interactive Browser Authentication.", null, t)).map(MsalToken::new);
}
/**
 * Gets a token for the requested scopes from the shared (persisted) token cache.
 *
 * @param request the details of the token request
 * @param username the expected username of the cached account, or {@code null} to accept a sole cached account
 * @return a Publisher that emits a MsalToken for the uniquely matching cached account
 */
public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts())
            .onErrorMap(t -> new CredentialUnavailableException(
                "Cannot get accounts from token cache. Error: " + t.getMessage(), t))
            .flatMap(set -> {
                IAccount requestedAccount;
                // De-duplicate cached accounts by home account id, optionally filtered by username.
                Map<String, IAccount> accounts = new HashMap<>();
                if (set.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException("SharedTokenCacheCredential "
                        + "authentication unavailable. No accounts were found in the cache."));
                }
                for (IAccount cached : set) {
                    if (username == null || username.equals(cached.username())) {
                        if (!accounts.containsKey(cached.homeAccountId())) {
                            accounts.put(cached.homeAccountId(), cached);
                        }
                    }
                }
                // NOTE(review): the failures below are raised as plain RuntimeException while the
                // empty-cache case above uses CredentialUnavailableException, despite the messages
                // all saying "authentication unavailable" — confirm this inconsistency is intended.
                if (accounts.isEmpty()) {
                    return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential "
                        + "authentication unavailable. No account matching the specified username: %s was "
                        + "found in the cache.", username)));
                } else if (accounts.size() > 1) {
                    if (username == null) {
                        return Mono.error(new RuntimeException("SharedTokenCacheCredential authentication "
                            + "unavailable. Multiple accounts were found in the cache. Use username and "
                            + "tenant id to disambiguate."));
                    } else {
                        return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential "
                            + "authentication unavailable. Multiple accounts matching the specified username: "
                            + "%s were found in the cache.", username)));
                    }
                } else {
                    requestedAccount = accounts.values().iterator().next();
                }
                return authenticateWithPublicClientCache(request, requestedAccount);
            }));
}
/**
 * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint.
 *
 * <p>Two-phase flow: an unauthenticated probe is expected to fail with 401 whose
 * {@code WWW-Authenticate} header names the file holding the challenge secret; the secret
 * is then replayed as a Basic authorization value on the real request.</p>
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint,
                                                                  TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode("2019-11-01", "UTF-8"));
        URL url = new URL(String.format("%s?%s", identityEndpoint, payload));
        String secretKey = null;
        try {
            // Phase 1: unauthenticated probe; the 401 response carries the secret-file location.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            // NOTE(review): this Scanner is created but never read or closed; the probe is
            // expected to throw IOException before the body is needed — confirm intended.
            new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
        } catch (IOException e) {
            if (connection == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Failed to initialize "
                    + "Http URL connection to the endpoint.",
                    null, e));
            }
            int status = connection.getResponseCode();
            if (status != 401) {
                throw logger.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401"
                    + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status),
                    null, e));
            }
            String realm = connection.getHeaderField("WWW-Authenticate");
            if (realm == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            // The header is expected as "<name>=<path>"; everything after the first '=' is the
            // path of the file containing the secret.
            int separatorIndex = realm.indexOf("=");
            if (separatorIndex == -1) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            String secretKeyPath = realm.substring(separatorIndex + 1);
            secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        if (secretKey == null) {
            throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value"
                + " in the response from Azure Arc Managed Identity Endpoint",
                null));
        }
        try {
            // Phase 2: authenticated request presenting the secret as a Basic authorization value.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey));
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner scanner = new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
            String result = scanner.hasNext() ? scanner.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity endpoint.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param thumbprint the thumbprint of the server certificate to trust for the TLS connection
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint,
                                                                            String identityHeader,
                                                                            String thumbprint,
                                                                            TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpsURLConnection connection = null;
        String endpoint = identityEndpoint;
        String headerValue = identityHeader;
        String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION;
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            payload.append("&client_id=");
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpsURLConnection) url.openConnection();
            // Pin trust for the endpoint's certificate to the provided thumbprint.
            IdentitySslUtil.addTrustedCertificateThumbprint(getClass().getSimpleName(), connection,
                thumbprint);
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                connection.setRequestProperty("Secret", headerValue);
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            // NOTE(review): the Scanner/response stream is not explicitly closed; disconnect()
            // below releases the connection — confirm no stream leak is possible here.
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the App Service Managed Service Identity endpoint.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param msiEndpoint the MSI endpoint to acquire token from
 * @param msiSecret the msi secret to acquire token with
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader,
                                                               String msiEndpoint, String msiSecret,
                                                               TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        String endpoint;
        String headerValue;
        String endpointVersion;
        // Prefer the newer 2019-08-01 IDENTITY_ENDPOINT contract when configured; otherwise
        // fall back to the legacy 2017-09-01 MSI_ENDPOINT contract.
        if (identityEndpoint != null) {
            endpoint = identityEndpoint;
            headerValue = identityHeader;
            endpointVersion = IDENTITY_ENDPOINT_VERSION;
        } else {
            endpoint = msiEndpoint;
            headerValue = msiSecret;
            endpointVersion = MSI_ENDPOINT_VERSION;
        }
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            // The two contract versions spell the client id query parameter differently.
            if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                payload.append("&client_id=");
            } else {
                payload.append("&clientid=");
            }
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                // The secret header name also differs between the two contract versions.
                if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                    connection.setRequestProperty("X-IDENTITY-HEADER", headerValue);
                } else {
                    connection.setRequestProperty("Secret", headerValue);
                }
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
* Asynchronously acquire a token from the Virtual Machine IMDS endpoint.
*
* @param request the details of the token request
* @return a Publisher that emits an AccessToken
*/
public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) {
String resource = ScopeUtil.scopesToResource(request.getScopes());
StringBuilder payload = new StringBuilder();
final int imdsUpgradeTimeInMs = 70 * 1000;
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
payload.append("&resource=");
payload.append(URLEncoder.encode(resource, "UTF-8"));
if (clientId != null) {
payload.append("&client_id=");
payload.append(URLEncoder.encode(clientId, "UTF-8"));
}
} catch (IOException exception) {
return Mono.error(exception);
}
return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> {
int retry = 1;
while (retry <= options.getMaxRetry()) {
URL url = null;
HttpURLConnection connection = null;
try {
url =
new URL(String.format("http:
payload.toString()));
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setRequestProperty("Metadata", "true");
connection.connect();
Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
.useDelimiter("\\A");
String result = s.hasNext() ? s.next() : "";
return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
} catch (IOException exception) {
if (connection == null) {
throw logger.logExceptionAsError(new RuntimeException(
String.format("Could not connect to the url: %s.", url), exception));
}
int responseCode;
try {
responseCode = connection.getResponseCode();
} catch (Exception e) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
}
if (responseCode == 400) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established.", null));
}
if (responseCode == 410
|| responseCode == 429
|| responseCode == 404
|| (responseCode >= 500 && responseCode <= 599)) {
int retryTimeoutInMs = options.getRetryTimeout()
.apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000;
retryTimeoutInMs =
(responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? imdsUpgradeTimeInMs
: retryTimeoutInMs;
retry++;
if (retry > options.getMaxRetry()) {
break;
} else {
sleep(retryTimeoutInMs);
}
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Couldn't acquire access token from IMDS, verify your objectId, "
+ "clientId or msiResourceId", exception));
}
} finally {
if (connection != null) {
connection.disconnect();
}
}
}
throw logger.logExceptionAsError(new RuntimeException(
String.format("MSI: Failed to acquire tokens after retrying %s times",
options.getMaxRetry())));
}));
}
private Mono<Boolean> checkIMDSAvailable() {
StringBuilder payload = new StringBuilder();
try {
payload.append("api-version=");
payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
} catch (IOException exception) {
return Mono.error(exception);
}
return Mono.fromCallable(() -> {
HttpURLConnection connection = null;
URL url = new URL(String.format("http:
payload.toString()));
try {
connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod("GET");
connection.setConnectTimeout(500);
connection.connect();
} catch (Exception e) {
throw logger.logExceptionAsError(
new CredentialUnavailableException(
"ManagedIdentityCredential authentication unavailable. "
+ "Connection to IMDS endpoint cannot be established, "
+ e.getMessage() + ".", e));
} finally {
if (connection != null) {
connection.disconnect();
}
}
return true;
});
}
/**
 * Sleeps for the given number of milliseconds.
 *
 * @param millis the number of milliseconds to sleep
 * @throws IllegalStateException if the wait is interrupted; the thread's interrupted status is
 *     restored first so callers up the stack can still observe it
 */
private static void sleep(int millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException ex) {
        // Re-assert the interrupt flag before translating — swallowing it hides the interrupt
        // from the rest of the call stack.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(ex);
    }
}
/**
 * Translates the SDK's proxy configuration into a {@code java.net.Proxy}.
 * SOCKS4/SOCKS5 map to SOCKS; everything else (including HTTP) maps to HTTP.
 *
 * @param options the SDK proxy options
 * @return the equivalent java.net proxy
 */
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    Type proxyType;
    switch (options.getType()) {
        case SOCKS4:
        case SOCKS5:
            proxyType = Type.SOCKS;
            break;
        default:
            proxyType = Type.HTTP;
            break;
    }
    return new Proxy(proxyType, options.getAddress());
}
/**
 * Returns a trusted working directory for spawning external processes:
 * {@code %SystemRoot%\system32} on Windows (or null if SystemRoot is unset),
 * {@code /bin/} elsewhere.
 */
private String getSafeWorkingDirectory() {
    if (!isWindowsPlatform()) {
        return DEFAULT_MAC_LINUX_PATH;
    }
    return CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)
        ? null
        : DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32";
}
/** Returns true when the current OS name reports itself as Windows. */
private boolean isWindowsPlatform() {
    String osName = System.getProperty("os.name");
    return osName.contains("Windows");
}
/**
 * Masks every match of the given pattern in the input with {@code ****}.
 *
 * @param regex the pattern identifying sensitive text
 * @param input the text to redact
 * @return the redacted text
 */
private String redactInfo(String regex, String input) {
    final String mask = "****";
    return input.replaceAll(regex, mask);
}
/**
 * Opens the given URL in the platform's default browser, logging an error when no
 * supported launcher is available.
 *
 * @param url the URL to open
 * @throws IOException if the launcher process cannot be started
 */
void openUrl(String url) throws IOException {
    Runtime rt = Runtime.getRuntime();
    String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
    // Fix: use the String[] overload of exec. The single-String overload tokenizes the whole
    // command on whitespace, which would split a URL containing spaces into separate arguments;
    // the array form passes the URL through as one argument.
    if (os.contains("win")) {
        rt.exec(new String[] {"rundll32", "url.dll,FileProtocolHandler", url});
    } else if (os.contains("mac")) {
        rt.exec(new String[] {"open", url});
    } else if (os.contains("nix") || os.contains("nux")) {
        rt.exec(new String[] {"xdg-open", url});
    } else {
        logger.error("Browser could not be opened - please open {} in a browser on this device.", url);
    }
}
/**
 * Wraps the given exception in an already-failed future, for bridging synchronous
 * failures into MSAL's asynchronous API.
 *
 * @param e the failure to propagate
 * @return a CompletableFuture completed exceptionally with {@code e}
 */
private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) {
    CompletableFuture<IAuthenticationResult> failed = new CompletableFuture<>();
    failed.completeExceptionally(e);
    return failed;
}
/**
 * Initializes {@code httpPipelineAdapter} from the configured options, preferring an explicit
 * pipeline, then an explicit HttpClient, then a default client.
 *
 * <p>When neither a pipeline nor a client is configured but proxy options are set, the adapter is
 * deliberately left {@code null}: callers check for null and hand the proxy options to MSAL's own
 * java.net proxy support instead.</p>
 */
private void initializeHttpPipelineAdapter() {
    HttpPipeline httpPipeline = options.getHttpPipeline();
    if (httpPipeline != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline);
    } else {
        HttpClient httpClient = options.getHttpClient();
        if (httpClient != null) {
            httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient));
        } else if (options.getProxyOptions() == null) {
            httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()));
        }
    }
}
/**
 * Get the configured tenant id.
 *
 * @return the tenant id.
 */
public String getTenantId() {
    return this.tenantId;
}
/**
 * Get the configured client id.
 *
 * @return the client id.
 */
public String getClientId() {
    return this.clientId;
}
/** Returns true when the configured tenant is the ADFS pseudo-tenant ("adfs"). */
private boolean isADFSTenant() {
    return ADFS_TENANT.equals(this.tenantId);
}
/**
 * Reads the configured certificate as raw bytes: from the file path when set, otherwise by
 * draining the input stream; an empty array when neither source is configured.
 *
 * @return the certificate bytes, possibly empty
 * @throws IOException if the file or stream cannot be read
 */
private byte[] getCertificateBytes() throws IOException {
    if (certificatePath != null) {
        return Files.readAllBytes(Paths.get(certificatePath));
    }
    if (certificate == null) {
        return new byte[0];
    }
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    for (int read = certificate.read(chunk, 0, chunk.length); read != -1;
            read = certificate.read(chunk, 0, chunk.length)) {
        collected.write(chunk, 0, read);
    }
    return collected.toByteArray();
}
/**
 * Opens the configured certificate as a stream: a fresh FileInputStream when a path is set,
 * otherwise the configured stream itself (which may be null when nothing was configured).
 *
 * @return the certificate stream, or null
 * @throws IOException if the certificate file cannot be opened
 */
private InputStream getCertificateInputStream() throws IOException {
    if (certificatePath != null) {
        return new FileInputStream(certificatePath);
    }
    // When no path is set, this is either the configured stream or null — both match the
    // original branch-by-branch behavior.
    return certificate;
}
} | class IdentityClient {
// Shared, thread-safe serializer for managed-identity token responses.
private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
// Used only to jitter managed-identity retry backoff; not security sensitive.
private static final Random RANDOM = new Random();
// Shell/launcher fragments and error markers for invoking the Azure CLI per platform.
private static final String WINDOWS_STARTER = "cmd.exe";
private static final String LINUX_MAC_STARTER = "/bin/sh";
private static final String WINDOWS_SWITCHER = "/c";
private static final String LINUX_MAC_SWITCHER = "-c";
private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
private static final String LINUX_MAC_PROCESS_ERROR_MESSAGE = "(.*)az:(.*)not found";
private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
private static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
// Cached tokens expiring within this window are treated as stale and refreshed proactively.
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
// MSAL persisted token-cache file names, location and per-platform protection settings.
private static final String DEFAULT_PUBLIC_CACHE_FILE_NAME = "msal.cache";
private static final String DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME = "msal.confidential.cache";
private static final Path DEFAULT_CACHE_FILE_PATH = Platform.isWindows()
    ? Paths.get(System.getProperty("user.home"), "AppData", "Local", ".IdentityService")
    : Paths.get(System.getProperty("user.home"), ".IdentityService");
private static final String DEFAULT_KEYCHAIN_SERVICE = "Microsoft.Developer.IdentityService";
private static final String DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT = "MSALCache";
private static final String DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT = "MSALConfidentialCache";
private static final String DEFAULT_KEYRING_NAME = "default";
private static final String DEFAULT_KEYRING_SCHEMA = "msal.cache";
private static final String DEFAULT_PUBLIC_KEYRING_ITEM_NAME = DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME = DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT;
private static final String DEFAULT_KEYRING_ATTR_NAME = "MsalClientID";
private static final String DEFAULT_KEYRING_ATTR_VALUE = "Microsoft.Developer.IdentityService";
// API versions for the App Service / legacy MSI managed-identity endpoints.
private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
private static final String MSI_ENDPOINT_VERSION = "2017-09-01";
private static final String ADFS_TENANT = "adfs";
// NOTE(review): this literal appears truncated in this copy of the file (likely
// "http://localhost") — restore it against the upstream source.
private static final String HTTP_LOCALHOST = "http:
private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
private final ClientLogger logger = new ClientLogger(IdentityClient.class);
private final IdentityClientOptions options;
private final String tenantId;
private final String clientId;
private final String clientSecret;
// The certificate may be supplied either as a stream or a file path; the password applies to PFX.
private final InputStream certificate;
private final String certificatePath;
private final String certificatePassword;
private HttpPipelineAdapter httpPipelineAdapter;
// Lazily-initialized, thread-safe accessors for the MSAL application objects.
private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor;
private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor;
/**
 * Creates an IdentityClient with the given options.
 *
 * @param tenantId the tenant ID of the application.
 * @param clientId the client ID of the application.
 * @param clientSecret the client secret of the application.
 * @param certificatePath the path to the PKCS12 or PEM certificate of the application.
 * @param certificate the PKCS12 or PEM certificate of the application.
 * @param certificatePassword the password protecting the PFX certificate.
 * @param isSharedTokenCacheCredential Indicate whether the credential is
 *     {@link com.azure.identity.SharedTokenCacheCredential} or not.
 * @param options the options configuring the client.
 */
IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath,
               InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
               IdentityClientOptions options) {
    // Fall back to the "organizations" multi-tenant authority and default options when unset.
    this.tenantId = (tenantId == null) ? "organizations" : tenantId;
    this.options = (options == null) ? new IdentityClientOptions() : options;
    this.clientId = clientId;
    this.clientSecret = clientSecret;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    // MSAL application objects are expensive; build them lazily on first use.
    this.publicClientApplicationAccessor = new SynchronizedAccessor<PublicClientApplication>(
        () -> getPublicClientApplication(isSharedTokenCacheCredential));
    this.confidentialClientApplicationAccessor = new SynchronizedAccessor<ConfidentialClientApplication>(
        () -> getConfidentialClientApplication());
}
/**
 * Builds the MSAL ConfidentialClientApplication from the configured client secret or certificate,
 * wiring in the HTTP pipeline/proxy, executor service and optional persisted token cache.
 *
 * @return the configured confidential client application
 * @throws IllegalArgumentException if no client id is set, or neither a secret nor a certificate
 *     is configured
 */
private ConfidentialClientApplication getConfidentialClientApplication() {
    if (clientId == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A non-null value for client ID must be provided for user authentication."));
    }
    // Authority is "<host>/<tenant>"; strip trailing slashes from the configured host first.
    String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
    IClientCredential credential;
    if (clientSecret != null) {
        credential = ClientCredentialFactory.createFromSecret(clientSecret);
    } else if (certificate != null || certificatePath != null) {
        try {
            if (certificatePassword == null) {
                // No password: treat the input as PEM, possibly carrying a certificate chain.
                byte[] pemCertificateBytes = getCertificateBytes();
                List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
                PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
                if (x509CertificateList.size() == 1) {
                    credential = ClientCredentialFactory.createFromCertificate(
                        privateKey, x509CertificateList.get(0));
                } else {
                    credential = ClientCredentialFactory.createFromCertificateChain(
                        privateKey, x509CertificateList);
                }
            } else {
                // A password implies a PFX/PKCS12 store.
                InputStream pfxCertificateStream = getCertificateInputStream();
                credential = ClientCredentialFactory.createFromCertificate(
                    pfxCertificateStream, certificatePassword);
            }
        } catch (IOException | GeneralSecurityException e) {
            throw logger.logExceptionAsError(new RuntimeException(
                "Failed to parse the certificate for the credential: " + e.getMessage(), e));
        }
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Must provide client secret or client certificate path"));
    }
    ConfidentialClientApplication.Builder applicationBuilder =
        ConfidentialClientApplication.builder(clientId, credential);
    try {
        applicationBuilder = applicationBuilder.authority(authorityUrl);
    } catch (MalformedURLException e) {
        throw logger.logExceptionAsWarning(new IllegalStateException(e));
    }
    applicationBuilder.sendX5c(options.isIncludeX5c());
    initializeHttpPipelineAdapter();
    if (httpPipelineAdapter != null) {
        applicationBuilder.httpClient(httpPipelineAdapter);
    } else {
        // No pipeline configured: hand the proxy settings straight to MSAL.
        applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
    }
    if (options.getExecutorService() != null) {
        applicationBuilder.executorService(options.getExecutorService());
    }
    if (options.isSharedTokenCacheEnabled()) {
        try {
            PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
                DEFAULT_CONFIDENTIAL_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
            if (Platform.isMac()) {
                persistenceSettingsBuilder.setMacKeychain(
                    DEFAULT_KEYCHAIN_SERVICE, DEFAULT_CONFIDENTIAL_KEYCHAIN_ACCOUNT);
            }
            // NOTE(review): a token-cache access aspect is only registered on Linux here (the Mac
            // branch configures the keychain but never calls setTokenCacheAccessAspect, and there
            // is no Windows branch), unlike getPublicClientApplication — confirm intended.
            if (Platform.isLinux()) {
                try {
                    persistenceSettingsBuilder
                        .setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
                            DEFAULT_CONFIDENTIAL_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME,
                            DEFAULT_KEYRING_ATTR_VALUE, null, null);
                    applicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                } catch (KeyRingAccessException e) {
                    // Fall back to an unencrypted file cache only when the caller opted in.
                    if (!options.getAllowUnencryptedCache()) {
                        throw logger.logExceptionAsError(e);
                    }
                    persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
                    applicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                }
            }
        } catch (Throwable t) {
            throw logger.logExceptionAsError(new ClientAuthenticationException(
                "Shared token cache is unavailable in this environment.", null, t));
        }
    }
    return applicationBuilder.build();
}
/**
 * Builds the MSAL {@link PublicClientApplication} used by the user-credential flows
 * (device code, browser interaction, username/password, shared token cache, VS Code, IntelliJ),
 * wiring in the authority, HTTP transport, executor service, client capabilities and — when
 * enabled — the platform-specific persistent shared token cache.
 *
 * @param sharedTokenCacheCredential true when invoked on behalf of the shared-token-cache
 *     credential; only changes which exception type is thrown if persistent-cache setup fails.
 * @return the configured public client application
 */
private PublicClientApplication getPublicClientApplication(boolean sharedTokenCacheCredential) {
    if (clientId == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "A non-null value for client ID must be provided for user authentication."));
    }
    // Authority = authority host with trailing slashes stripped, plus "/" and the tenant id.
    String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId;
    PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId);
    try {
        publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl);
    } catch (MalformedURLException e) {
        throw logger.logExceptionAsWarning(new IllegalStateException(e));
    }
    // Route MSAL's HTTP traffic through the azure-core pipeline when one can be built;
    // otherwise fall back to MSAL's own java.net proxy support.
    initializeHttpPipelineAdapter();
    if (httpPipelineAdapter != null) {
        publicClientApplicationBuilder.httpClient(httpPipelineAdapter);
    } else {
        publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
    }
    if (options.getExecutorService() != null) {
        publicClientApplicationBuilder.executorService(options.getExecutorService());
    }
    // Advertise the "CP1" client capability on every request.
    // NOTE(review): presumably this opts in to claims-challenge support — confirm against MSAL docs.
    Set<String> set = new HashSet<>(1);
    set.add("CP1");
    publicClientApplicationBuilder.clientCapabilities(set);
    if (options.isSharedTokenCacheEnabled()) {
        try {
            PersistenceSettings.Builder persistenceSettingsBuilder = PersistenceSettings.builder(
                DEFAULT_PUBLIC_CACHE_FILE_NAME, DEFAULT_CACHE_FILE_PATH);
            if (Platform.isWindows()) {
                // Windows: the default persistence settings are already protected (no extra setup).
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isMac()) {
                // macOS: protect the cache with the system keychain.
                persistenceSettingsBuilder.setMacKeychain(
                    DEFAULT_KEYCHAIN_SERVICE, DEFAULT_PUBLIC_KEYCHAIN_ACCOUNT);
                publicClientApplicationBuilder.setTokenCacheAccessAspect(
                    new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
            } else if (Platform.isLinux()) {
                try {
                    // Linux: prefer the keyring; fall back to an unencrypted file only when the
                    // caller explicitly allowed it via getAllowUnencryptedCache().
                    persistenceSettingsBuilder
                        .setLinuxKeyring(DEFAULT_KEYRING_NAME, DEFAULT_KEYRING_SCHEMA,
                            DEFAULT_PUBLIC_KEYRING_ITEM_NAME, DEFAULT_KEYRING_ATTR_NAME, DEFAULT_KEYRING_ATTR_VALUE,
                            null, null);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                } catch (KeyRingAccessException e) {
                    if (!options.getAllowUnencryptedCache()) {
                        throw logger.logExceptionAsError(e);
                    }
                    persistenceSettingsBuilder.setLinuxUseUnprotectedFileAsCacheStorage(true);
                    publicClientApplicationBuilder.setTokenCacheAccessAspect(
                        new PersistenceTokenCacheAccessAspect(persistenceSettingsBuilder.build()));
                }
            }
        } catch (Throwable t) {
            // Cache setup failure is reported differently depending on the calling credential:
            // "unavailable" (retriable by chained credentials) vs. a hard authentication error.
            String message = "Shared token cache is unavailable in this environment.";
            if (sharedTokenCacheCredential) {
                throw logger.logExceptionAsError(new CredentialUnavailableException(message, t));
            } else {
                throw logger.logExceptionAsError(new ClientAuthenticationException(message, null, t));
            }
        }
    }
    return publicClientApplicationBuilder.build();
}
/**
 * Asynchronously acquire a token using the credentials cached by the Azure Tools for IntelliJ
 * plugin. Supports service-principal ("SP") and device-code ("DC") cache entries.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an MsalToken, or errors when no usable IntelliJ cache exists
 */
public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) {
    try {
        IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath());
        // NOTE(review): if getAuthDetailsIfAvailable() can return null when no plugin login
        // exists, the getAuthMethod() call below would NPE — confirm against IntelliJCacheAccessor.
        IntelliJAuthMethodDetails authDetails = cacheAccessor.getAuthDetailsIfAvailable();
        String authType = authDetails.getAuthMethod();
        if (authType.equalsIgnoreCase("SP")) {
            // Service-principal login: rebuild a confidential client from the cached
            // client id / secret / tenant and perform a client-credentials grant.
            Map<String, String> spDetails = cacheAccessor
                .getIntellijServicePrincipalDetails(authDetails.getCredFilePath());
            String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant");
            try {
                ConfidentialClientApplication.Builder applicationBuilder =
                    ConfidentialClientApplication.builder(spDetails.get("client"),
                        ClientCredentialFactory.createFromSecret(spDetails.get("key")))
                        .authority(authorityUrl);
                if (httpPipelineAdapter != null) {
                    applicationBuilder.httpClient(httpPipelineAdapter);
                } else if (options.getProxyOptions() != null) {
                    applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
                }
                if (options.getExecutorService() != null) {
                    applicationBuilder.executorService(options.getExecutorService());
                }
                ConfidentialClientApplication application = applicationBuilder.build();
                return Mono.fromFuture(application.acquireToken(
                    ClientCredentialParameters.builder(new HashSet<>(request.getScopes()))
                        .build())).map(MsalToken::new);
            } catch (MalformedURLException e) {
                return Mono.error(e);
            }
        } else if (authType.equalsIgnoreCase("DC")) {
            // Device-code login: redeem the cached refresh token for a new access token.
            if (isADFSTenant()) {
                return Mono.error(new CredentialUnavailableException("IntelliJCredential "
                    + "authentication unavailable. ADFS tenant/authorities are not supported."));
            }
            JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials();
            String refreshToken = intelliJCredentials.get("refreshToken").textValue();
            RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder =
                RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken);
            // Propagate any caller-supplied claims challenge into the MSAL request.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
                refreshTokenParametersBuilder.claims(customClaimRequest);
            }
            return publicClientApplicationAccessor.getValue()
                .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build()))
                    .map(MsalToken::new));
        } else {
            // Unknown auth method in the cache — treat the credential as unavailable.
            throw logger.logExceptionAsError(new CredentialUnavailableException(
                "IntelliJ Authentication not available."
                    + " Please login with Azure Tools for IntelliJ plugin in the IDE."));
        }
    } catch (IOException e) {
        return Mono.error(e);
    }
}
/**
 * Asynchronously acquire a token from Active Directory with Azure CLI.
 *
 * <p>Shells out to {@code az account get-access-token} through the platform shell and parses the
 * JSON it prints on stdout.</p>
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 * @throws IllegalStateException if no safe working directory is available or the process fails
 *     with an I/O error
 */
public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) {
    String azCommand = "az account get-access-token --output json --resource ";
    String scopes = ScopeUtil.scopesToResource(request.getScopes());
    try {
        // Reject malformed scopes before anything is executed (defense against injection).
        ScopeUtil.validateScope(scopes);
    } catch (IllegalArgumentException ex) {
        return Mono.error(logger.logExceptionAsError(ex));
    }
    String command = azCommand + scopes;
    AccessToken token;
    try {
        // Pick the platform shell wrapper (cmd /c vs. sh -c equivalents).
        String starter;
        String switcher;
        if (isWindowsPlatform()) {
            starter = WINDOWS_STARTER;
            switcher = WINDOWS_SWITCHER;
        } else {
            starter = LINUX_MAC_STARTER;
            switcher = LINUX_MAC_SWITCHER;
        }
        ProcessBuilder builder = new ProcessBuilder(starter, switcher, command);
        // Run from a directory the current user cannot have planted binaries in.
        String workingDirectory = getSafeWorkingDirectory();
        if (workingDirectory != null) {
            builder.directory(new File(workingDirectory));
        } else {
            throw logger.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
                + " found to execute CLI command from."));
        }
        builder.redirectErrorStream(true);
        Process process = builder.start();
        StringBuilder output = new StringBuilder();
        // try-with-resources guarantees the reader (and the process stdout pipe) is closed
        // even when a CredentialUnavailableException is thrown mid-read. The previous
        // hand-rolled finally block could also swallow in-flight errors by returning.
        try (BufferedReader reader = new BufferedReader(
                 new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                // Shell prints a "command not found" style message when az is absent.
                if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
                        || line.matches(LINUX_MAC_PROCESS_ERROR_MESSAGE)) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "AzureCliCredential authentication unavailable. Azure CLI not installed"));
                }
                output.append(line);
            }
        }
        String processOutput = output.toString();
        // FIX: the result of waitFor(timeout) was previously ignored; when the CLI hung past
        // 10 seconds, exitValue() threw an undocumented IllegalThreadStateException. Treat a
        // timeout as a failed invocation instead.
        if (!process.waitFor(10, TimeUnit.SECONDS)) {
            throw logger.logExceptionAsError(
                new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
        }
        if (process.exitValue() != 0) {
            if (processOutput.length() > 0) {
                // Redact the token value before surfacing CLI output in an exception message.
                String redactedOutput = redactInfo("\"accessToken\": \"(.*?)(\"|$)", processOutput);
                if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "AzureCliCredential authentication unavailable."
                                + " Please run 'az login' to set up account"));
                }
                throw logger.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
            } else {
                throw logger.logExceptionAsError(
                    new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
            }
        }
        // Parse {"accessToken": ..., "expiresOn": "yyyy-MM-dd HH:mm:ss.ffffff"} — the CLI
        // reports expiry in local time, so convert to UTC.
        Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
            SerializerEncoding.JSON);
        String accessToken = objectMap.get("accessToken");
        String time = objectMap.get("expiresOn");
        String timeToSecond = time.substring(0, time.indexOf("."));
        String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
        OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
            .atZone(ZoneId.systemDefault())
            .toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
        token = new AccessToken(accessToken, expiresOn);
    } catch (InterruptedException e) {
        // Restore the interrupt flag for callers before failing.
        Thread.currentThread().interrupt();
        throw logger.logExceptionAsError(new IllegalStateException(e));
    } catch (IOException e) {
        throw logger.logExceptionAsError(new IllegalStateException(e));
    } catch (RuntimeException e) {
        return Mono.error(logger.logExceptionAsError(e));
    }
    return Mono.just(token);
}
/**
 * Asynchronously acquire a token from Active Directory with a client secret.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> {
            // Client-credentials grant over the requested scopes.
            ClientCredentialParameters parameters =
                ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build();
            return Mono.fromFuture(() -> confidentialClient.acquireToken(parameters))
                .map(MsalToken::new);
        });
}
/**
 * Assembles a minimal azure-core HTTP pipeline around the supplied client: the retry policy
 * sandwiched between the standard before/after policy providers, followed by HTTP logging.
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    final List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    return new HttpPipelineBuilder()
        .httpClient(httpClient)
        .policies(pipelinePolicies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
/**
 * Asynchronously acquire a token from Active Directory with a username and a password.
 *
 * @param request the details of the token request
 * @param username the username of the user
 * @param password the password of the user
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request,
                                                        String username, String password) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> {
            UserNamePasswordParameters.UserNamePasswordParametersBuilder parametersBuilder =
                UserNamePasswordParameters.builder(
                    new HashSet<>(request.getScopes()), username, password.toCharArray());
            // Forward any caller-supplied claims challenge to MSAL.
            if (request.getClaims() != null) {
                parametersBuilder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
            }
            return pc.acquireToken(parametersBuilder.build());
        }))
        .onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and "
            + "password", null, t))
        .map(MsalToken::new);
}
/**
 * Asynchronously acquire a token silently from the confidential client's local token cache.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken, or completes empty when no cached token exists
 *     or the cached token is within the refresh offset of expiry
 */
public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) {
    return confidentialClientApplicationAccessor.getValue()
        .flatMap(confidentialClient -> Mono.fromFuture(() -> {
            SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder(
                new HashSet<>(request.getScopes()));
            try {
                return confidentialClient.acquireTokenSilently(parametersBuilder.build());
            } catch (MalformedURLException e) {
                return getFailedCompletableFuture(logger.logExceptionAsError(new RuntimeException(e)));
            }
        }).map(ar -> (AccessToken) new MsalToken(ar))
        // Treat tokens expiring within REFRESH_OFFSET as a cache miss (empty Mono) so
        // callers fall through to a fresh token acquisition.
        .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET))));
}
/**
 * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide
 * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a
 * different device.
 *
 * @param request the details of the token request
 * @param deviceCodeConsumer the user provided closure that will consume the device code challenge
 * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device
 * code expires
 */
public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request,
                                                  Consumer<DeviceCodeInfo> deviceCodeConsumer) {
    return publicClientApplicationAccessor.getValue().flatMap(pc ->
        Mono.fromFuture(() -> {
            // Wrap MSAL's device-code callback into the SDK's DeviceCodeInfo type; expiry is
            // computed from "now" plus the server-reported expiresIn seconds.
            DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
                DeviceCodeFlowParameters.builder(
                    new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
                        new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
                            OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())));
            // Forward any caller-supplied claims challenge to MSAL.
            if (request.getClaims() != null) {
                ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
                parametersBuilder.claims(customClaimRequest);
            }
            return pc.acquireToken(parametersBuilder.build());
        }).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code", null, t))
        .map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with the Visual Studio Code cached
 * refresh token.
 *
 * @param request the details of the token request
 * @param cloud the Azure cloud name whose cached VS Code credentials should be redeemed
 * @return a Publisher that emits an AccessToken.
 */
public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) {
    // ADFS authorities cannot redeem the VS Code refresh token.
    if (isADFSTenant()) {
        return Mono.error(new CredentialUnavailableException("VsCodeCredential "
            + "authentication unavailable. ADFS tenant/authorities are not supported."));
    }
    VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor();
    // "credential" here is the refresh token stored by the VS Code Azure Account extension.
    String credential = accessor.getCredentials("VS Code Azure", cloud);
    RefreshTokenParameters.RefreshTokenParametersBuilder parametersBuilder = RefreshTokenParameters
        .builder(new HashSet<>(request.getScopes()), credential);
    // Forward any caller-supplied claims challenge to MSAL.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
    }
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(pc.acquireToken(parametersBuilder.build())).map(MsalToken::new));
}
/**
 * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow.
 *
 * @param request the details of the token request
 * @param authorizationCode the oauth2 authorization code
 * @param redirectUrl the redirectUrl where the authorization code is sent to
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode,
                                                         URI redirectUrl) {
    AuthorizationCodeParameters.AuthorizationCodeParametersBuilder parametersBuilder =
        AuthorizationCodeParameters.builder(authorizationCode, redirectUrl)
            .scopes(new HashSet<>(request.getScopes()));
    // Forward any caller-supplied claims challenge to MSAL.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        parametersBuilder.claims(customClaimRequest);
    }
    // A configured client secret means the code must be redeemed by the confidential client
    // (web-app style flow); otherwise use the public client.
    Mono<IAuthenticationResult> acquireToken;
    if (clientSecret != null) {
        acquireToken = confidentialClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build())));
    } else {
        acquireToken = publicClientApplicationAccessor.getValue()
            .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build())));
    }
    return acquireToken.onErrorMap(t -> new ClientAuthenticationException(
        "Failed to acquire token with authorization code", null, t)).map(MsalToken::new);
}
/**
 * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user
 * to login. The credential will run a minimal local HttpServer at the given port, so
 * {@code http://localhost:port} must be listed as a valid reply URL for the application.
 *
 * @param request the details of the token request
 * @param port the port on which the HTTP server is listening
 * @param redirectUrl an explicit redirect URL to listen on, used when no port is supplied
 * @return a Publisher that emits an AccessToken
 */
public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port,
                                                          String redirectUrl) {
    URI redirectUri;
    String redirect;
    // Precedence: explicit port > explicit redirect URL > plain http://localhost.
    if (port != null) {
        redirect = HTTP_LOCALHOST + ":" + port;
    } else if (redirectUrl != null) {
        redirect = redirectUrl;
    } else {
        redirect = HTTP_LOCALHOST;
    }
    try {
        redirectUri = new URI(redirect);
    } catch (URISyntaxException e) {
        return Mono.error(logger.logExceptionAsError(new RuntimeException(e)));
    }
    InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
        InteractiveRequestParameters.builder(redirectUri).scopes(new HashSet<>(request.getScopes()));
    // Forward any caller-supplied claims challenge to MSAL.
    if (request.getClaims() != null) {
        ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
        builder.claims(customClaimRequest);
    }
    Mono<IAuthenticationResult> acquireToken = publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build())));
    return acquireToken.onErrorMap(t -> new ClientAuthenticationException(
        "Failed to acquire token with Interactive Browser Authentication.", null, t)).map(MsalToken::new);
}
/**
 * Gets a token from the shared token cache, silently, for the single account matching the
 * optional username filter.
 *
 * @param request the details of the token request
 * @param username optional username used to disambiguate between cached accounts; may be null
 * @return a Publisher that emits an MsalToken, or errors when zero or multiple accounts match
 */
public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) {
    return publicClientApplicationAccessor.getValue()
        .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts())
            .onErrorMap(t -> new CredentialUnavailableException(
                "Cannot get accounts from token cache. Error: " + t.getMessage(), t))
            .flatMap(set -> {
                IAccount requestedAccount;
                Map<String, IAccount> accounts = new HashMap<>();
                if (set.isEmpty()) {
                    return Mono.error(new CredentialUnavailableException("SharedTokenCacheCredential "
                        + "authentication unavailable. No accounts were found in the cache."));
                }
                // De-duplicate by home account id, keeping only accounts matching the
                // username filter (a null username matches everything).
                for (IAccount cached : set) {
                    if (username == null || username.equals(cached.username())) {
                        if (!accounts.containsKey(cached.homeAccountId())) {
                            accounts.put(cached.homeAccountId(), cached);
                        }
                    }
                }
                // NOTE(review): the three failure branches below throw plain RuntimeException
                // while the empty-cache branch above throws CredentialUnavailableException,
                // even though the messages all say "authentication unavailable" — confirm
                // whether chained credentials should be able to continue past these too.
                if (accounts.isEmpty()) {
                    return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential "
                        + "authentication unavailable. No account matching the specified username: %s was "
                        + "found in the cache.", username)));
                } else if (accounts.size() > 1) {
                    if (username == null) {
                        return Mono.error(new RuntimeException("SharedTokenCacheCredential authentication "
                            + "unavailable. Multiple accounts were found in the cache. Use username and "
                            + "tenant id to disambiguate."));
                    } else {
                        return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential "
                            + "authentication unavailable. Multiple accounts matching the specified username: "
                            + "%s were found in the cache.", username)));
                    }
                } else {
                    requestedAccount = accounts.values().iterator().next();
                }
                return authenticateWithPublicClientCache(request, requestedAccount);
            }));
}
/**
 * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint.
 *
 * <p>Azure Arc uses a two-step challenge: an unauthenticated GET is expected to fail with 401
 * and a {@code WWW-Authenticate: Basic realm=<file>} header naming a local file that holds the
 * secret; a second GET is then issued with that secret as a Basic Authorization header.</p>
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint,
                                                                  TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode("2019-11-01", "UTF-8"));
        URL url = new URL(String.format("%s?%s", identityEndpoint, payload));
        String secretKey = null;
        try {
            // Step 1: unauthenticated probe. On success nothing is consumed (the Scanner's
            // result is deliberately discarded); the interesting path is the IOException
            // raised by the expected 401 challenge below.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
        } catch (IOException e) {
            if (connection == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Failed to initialize "
                    + "Http URL connection to the endpoint.",
                    null, e));
            }
            int status = connection.getResponseCode();
            if (status != 401) {
                throw logger.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401"
                    + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status),
                    null, e));
            }
            String realm = connection.getHeaderField("WWW-Authenticate");
            if (realm == null) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            // The header has the form "Basic realm=<path>"; everything after '=' is the
            // path of the file containing the shared secret.
            int separatorIndex = realm.indexOf("=");
            if (separatorIndex == -1) {
                throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value"
                    + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint",
                    null));
            }
            String secretKeyPath = realm.substring(separatorIndex + 1);
            secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        if (secretKey == null) {
            throw logger.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value"
                + " in the response from Azure Arc Managed Identity Endpoint",
                null));
        }
        try {
            // Step 2: repeat the request authenticated with the secret from the challenge.
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey));
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner scanner = new Scanner(connection.getInputStream(), "UTF-8").useDelimiter("\\A");
            String result = scanner.hasNext() ? scanner.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity endpoint.
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with (sent as the "Secret" header)
 * @param thumbprint the certificate thumbprint used to pin the endpoint's self-signed TLS cert
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint,
                                                                            String identityHeader,
                                                                            String thumbprint,
                                                                            TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        HttpsURLConnection connection = null;
        String endpoint = identityEndpoint;
        String headerValue = identityHeader;
        String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION;
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        // Query string: resource, api-version, and optionally the user-assigned client id.
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            payload.append("&client_id=");
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpsURLConnection) url.openConnection();
            // Service Fabric serves a self-signed certificate; trust it only when its
            // thumbprint matches the one handed to this credential.
            IdentitySslUtil.addTrustedCertificateThumbprint(getClass().getSimpleName(), connection,
                thumbprint);
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                connection.setRequestProperty("Secret", headerValue);
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the App Service Managed Service Identity endpoint.
 *
 * <p>Supports both the newer IDENTITY_ENDPOINT/IDENTITY_HEADER contract and the legacy
 * MSI_ENDPOINT/MSI_SECRET contract; the two differ in api-version, the name of the secret
 * header, and the client id query parameter name.</p>
 *
 * @param identityEndpoint the Identity endpoint to acquire token from
 * @param identityHeader the identity header to acquire token with
 * @param msiEndpoint the MSI endpoint to acquire token from
 * @param msiSecret the msi secret to acquire token with
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader,
                                                               String msiEndpoint, String msiSecret,
                                                               TokenRequestContext request) {
    return Mono.fromCallable(() -> {
        // Prefer the newer identity endpoint; fall back to the legacy MSI endpoint.
        String endpoint;
        String headerValue;
        String endpointVersion;
        if (identityEndpoint != null) {
            endpoint = identityEndpoint;
            headerValue = identityHeader;
            endpointVersion = IDENTITY_ENDPOINT_VERSION;
        } else {
            endpoint = msiEndpoint;
            headerValue = msiSecret;
            endpointVersion = MSI_ENDPOINT_VERSION;
        }
        String resource = ScopeUtil.scopesToResource(request.getScopes());
        HttpURLConnection connection = null;
        StringBuilder payload = new StringBuilder();
        payload.append("resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        payload.append("&api-version=");
        payload.append(URLEncoder.encode(endpointVersion, "UTF-8"));
        if (clientId != null) {
            // The two endpoint generations spell the client id parameter differently.
            if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                payload.append("&client_id=");
            } else {
                payload.append("&clientid=");
            }
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
        try {
            URL url = new URL(String.format("%s?%s", endpoint, payload));
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            if (headerValue != null) {
                // ...and the secret header name also differs per generation.
                if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) {
                    connection.setRequestProperty("X-IDENTITY-HEADER", headerValue);
                } else {
                    connection.setRequestProperty("Secret", headerValue);
                }
            }
            connection.setRequestProperty("Metadata", "true");
            connection.connect();
            Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                .useDelimiter("\\A");
            String result = s.hasNext() ? s.next() : "";
            return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    });
}
/**
 * Asynchronously acquire a token from the Virtual Machine IMDS endpoint, retrying transient
 * failures up to {@code options.getMaxRetry()} times.
 *
 * @param request the details of the token request
 * @return a Publisher that emits an AccessToken
 */
public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) {
    String resource = ScopeUtil.scopesToResource(request.getScopes());
    StringBuilder payload = new StringBuilder();
    // 410 responses mean IMDS is upgrading; retries must wait at least this long.
    final int imdsUpgradeTimeInMs = 70 * 1000;
    try {
        payload.append("api-version=");
        payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
        payload.append("&resource=");
        payload.append(URLEncoder.encode(resource, "UTF-8"));
        if (clientId != null) {
            payload.append("&client_id=");
            payload.append(URLEncoder.encode(clientId, "UTF-8"));
        }
    } catch (IOException exception) {
        return Mono.error(exception);
    }
    // Probe the endpoint first so an absent IMDS fails fast as "unavailable".
    return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> {
        int retry = 1;
        while (retry <= options.getMaxRetry()) {
            URL url = null;
            HttpURLConnection connection = null;
            try {
                // NOTE(review): the URL literal below was truncated by comment-stripping in
                // this extraction; the full IMDS token URL must be restored from source control.
                url =
                    new URL(String.format("http:
                    payload.toString()));
                connection = (HttpURLConnection) url.openConnection();
                connection.setRequestMethod("GET");
                connection.setRequestProperty("Metadata", "true");
                connection.connect();
                Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name())
                    .useDelimiter("\\A");
                String result = s.hasNext() ? s.next() : "";
                return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON);
            } catch (IOException exception) {
                if (connection == null) {
                    throw logger.logExceptionAsError(new RuntimeException(
                        String.format("Could not connect to the url: %s.", url), exception));
                }
                int responseCode;
                try {
                    responseCode = connection.getResponseCode();
                } catch (Exception e) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "ManagedIdentityCredential authentication unavailable. "
                                + "Connection to IMDS endpoint cannot be established, "
                                + e.getMessage() + ".", e));
                }
                // 400 = identity not configured on this VM; not retriable.
                if (responseCode == 400) {
                    throw logger.logExceptionAsError(
                        new CredentialUnavailableException(
                            "ManagedIdentityCredential authentication unavailable. "
                                + "Connection to IMDS endpoint cannot be established.", null));
                }
                // Retriable: 410 (upgrading), 429 (throttled), 404, and 5xx.
                if (responseCode == 410
                        || responseCode == 429
                        || responseCode == 404
                        || (responseCode >= 500 && responseCode <= 599)) {
                    // NOTE(review): Duration.getNano() returns only the nanos-of-second
                    // component (0 for whole-second durations), and /1000 yields microseconds,
                    // not milliseconds — the intended backoff was presumably
                    // Duration.toMillis(); confirm before changing.
                    int retryTimeoutInMs = options.getRetryTimeout()
                        .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000;
                    retryTimeoutInMs =
                        (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? imdsUpgradeTimeInMs
                            : retryTimeoutInMs;
                    retry++;
                    if (retry > options.getMaxRetry()) {
                        break;
                    } else {
                        sleep(retryTimeoutInMs);
                    }
                } else {
                    throw logger.logExceptionAsError(new RuntimeException(
                        "Couldn't acquire access token from IMDS, verify your objectId, "
                            + "clientId or msiResourceId", exception));
                }
            } finally {
                if (connection != null) {
                    connection.disconnect();
                }
            }
        }
        throw logger.logExceptionAsError(new RuntimeException(
            String.format("MSI: Failed to acquire tokens after retrying %s times",
                options.getMaxRetry())));
    }));
}
/**
 * Probes the IMDS endpoint with a short (500 ms) connect timeout to distinguish "no managed
 * identity environment" from genuine authentication failures. Emits {@code true} when a TCP
 * connection can be established; otherwise errors with CredentialUnavailableException.
 */
private Mono<Boolean> checkIMDSAvailable() {
    StringBuilder payload = new StringBuilder();
    try {
        payload.append("api-version=");
        payload.append(URLEncoder.encode("2018-02-01", "UTF-8"));
    } catch (IOException exception) {
        return Mono.error(exception);
    }
    return Mono.fromCallable(() -> {
        HttpURLConnection connection = null;
        // NOTE(review): the URL literal below was truncated by comment-stripping in this
        // extraction; the full IMDS URL must be restored from source control.
        URL url = new URL(String.format("http:
            payload.toString()));
        try {
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setConnectTimeout(500);
            connection.connect();
        } catch (Exception e) {
            throw logger.logExceptionAsError(
                new CredentialUnavailableException(
                    "ManagedIdentityCredential authentication unavailable. "
                        + "Connection to IMDS endpoint cannot be established, "
                        + e.getMessage() + ".", e));
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        return true;
    });
}
/**
 * Blocks the current thread for the given number of milliseconds.
 *
 * @param millis how long to sleep
 * @throws IllegalStateException if the thread is interrupted while sleeping
 */
private static void sleep(int millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException ex) {
        // FIX: restore the interrupt flag before rethrowing so callers up the
        // stack can still observe that an interrupt was requested.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(ex);
    }
}
/**
 * Maps the SDK proxy configuration onto an equivalent {@link java.net.Proxy}.
 * SOCKS4/SOCKS5 become a SOCKS proxy; everything else is treated as HTTP.
 */
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    switch (options.getType()) {
        case SOCKS4:
        case SOCKS5:
            return new Proxy(Type.SOCKS, options.getAddress());
        default:
            // HTTP and any future types fall back to an HTTP proxy.
            return new Proxy(Type.HTTP, options.getAddress());
    }
}
/**
 * Returns a working directory that is safe to launch external processes from,
 * or null when one cannot be determined (Windows without a resolvable SystemRoot).
 */
private String getSafeWorkingDirectory() {
    // Non-Windows platforms always use the fixed default path.
    if (!isWindowsPlatform()) {
        return DEFAULT_MAC_LINUX_PATH;
    }
    return CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)
        ? null
        : DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32";
}
/** Returns true when running on Windows, detected via the os.name system property. */
private boolean isWindowsPlatform() {
    String osName = System.getProperty("os.name");
    return osName.contains("Windows");
}
/**
 * Masks every match of the supplied pattern with "****" so sensitive values
 * (e.g. access tokens) never reach logs or exception messages.
 */
private String redactInfo(String regex, String input) {
    final String mask = "****";
    return input.replaceAll(regex, mask);
}
/**
 * Opens the given URL in the platform's default browser, or logs an error when the
 * platform is unrecognized.
 *
 * <p>FIX: uses the {@code String[]} overload of {@link Runtime#exec} so the URL is passed
 * as a single argument instead of being re-tokenized on whitespace by the single-String
 * overload.</p>
 *
 * @param url the URL to open
 * @throws IOException if the browser process cannot be started
 */
void openUrl(String url) throws IOException {
    Runtime rt = Runtime.getRuntime();
    String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
    if (os.contains("win")) {
        rt.exec(new String[]{"rundll32", "url.dll,FileProtocolHandler", url});
    } else if (os.contains("mac")) {
        rt.exec(new String[]{"open", url});
    } else if (os.contains("nix") || os.contains("nux")) {
        rt.exec(new String[]{"xdg-open", url});
    } else {
        logger.error("Browser could not be opened - please open {} in a browser on this device.", url);
    }
}
/** Builds an already-failed future carrying the supplied exception. */
private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) {
    CompletableFuture<IAuthenticationResult> failed = new CompletableFuture<>();
    failed.completeExceptionally(e);
    return failed;
}
// Builds the HTTP pipeline adapter used for token requests. Precedence: a
// fully configured pipeline wins, then a user-supplied HttpClient, then a
// default client.
private void initializeHttpPipelineAdapter() {
HttpPipeline httpPipeline = options.getHttpPipeline();
if (httpPipeline != null) {
httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline);
} else {
HttpClient httpClient = options.getHttpClient();
if (httpClient != null) {
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient));
} else if (options.getProxyOptions() == null) {
// NOTE(review): when proxy options ARE configured but neither a pipeline
// nor a client was supplied, the adapter is left null here — presumably a
// proxy-aware client is created elsewhere; confirm against the callers.
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()));
}
}
}
/**
 * Returns the tenant id this instance was configured with.
 *
 * @return the configured tenant id.
 */
public String getTenantId() {
    return this.tenantId;
}
/**
 * Returns the client id this instance was configured with.
 *
 * @return the configured client id.
 */
public String getClientId() {
    return this.clientId;
}
// True when the authority uses the literal "adfs" tenant segment instead of
// an AAD tenant id; several AAD-only code paths must be skipped for ADFS.
// NOTE(review): throws NullPointerException if tenantId was never set.
private boolean isADFSTenant() {
return this.tenantId.equals(ADFS_TENANT);
}
private byte[] getCertificateBytes() throws IOException {
    // Loads the client certificate either from the configured file path or
    // from the configured stream; returns an empty array when neither is set.
    if (certificatePath != null) {
        return Files.readAllBytes(Paths.get(certificatePath));
    }
    if (certificate == null) {
        return new byte[0];
    }
    // Drain the stream fully into memory; note the stream is consumed here.
    ByteArrayOutputStream buffered = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    int count;
    while ((count = certificate.read(chunk, 0, chunk.length)) != -1) {
        buffered.write(chunk, 0, count);
    }
    return buffered.toByteArray();
}
private InputStream getCertificateInputStream() throws IOException {
    // Prefer opening the configured file path; otherwise fall back to the
    // configured stream, which is null when no certificate was provided —
    // exactly matching the original's three-way branch.
    if (certificatePath != null) {
        return new FileInputStream(certificatePath);
    }
    return certificate;
}
} |
Instead of needing to use a regex could we use `HttpHeader.getValue` or `HttpHeader.getValueList`? It's looking like we use the regex to handle comma concatenated header values. | List<AuthenticationChallenge> parseChallenges(String header) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header);
List<AuthenticationChallenge> challenges = new ArrayList<>();
while (matcher.find()) {
challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2)));
}
return challenges;
} | Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); | List<AuthenticationChallenge> parseChallenges(String header) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header);
List<AuthenticationChallenge> challenges = new ArrayList<>();
while (matcher.find()) {
challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2)));
}
return challenges;
} | class BearerTokenAuthenticationChallengePolicy implements HttpPipelinePolicy {
private static final String AUTHORIZATION_HEADER = "Authorization";
private static final String BEARER = "Bearer";
// Splits a WWW-Authenticate value into individual challenges:
// group(1) = scheme, group(2) = the comma-separated key="value" parameters.
public static final Pattern AUTHENTICATION_CHALLENGE_PATTERN =
Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?");
// Extracts each key="value" pair from a single challenge's parameter list.
public static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN =
Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+");
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
// Challenge parameter carrying base64url-encoded claims.
public static final String CLAIMS_PARAMETER = "claims";
// Credential and scopes used for the default (non-challenge) token request.
private final TokenCredential credential;
private final String[] scopes;
private final Supplier<Mono<AccessToken>> defaultTokenSupplier;
// Caches tokens so each request does not trigger a fresh token fetch.
private final AccessTokenCache cache;
/**
 * Creates BearerTokenAuthenticationChallengePolicy.
 *
 * @param credential the token credential to authenticate the request; must not be null
 * @param scopes the scopes of authentication the credential should get token for
 * @throws NullPointerException if {@code credential} is null
 */
public BearerTokenAuthenticationChallengePolicy(TokenCredential credential, String... scopes) {
Objects.requireNonNull(credential);
this.credential = credential;
this.scopes = scopes;
// Default supplier: request a token for the configured scopes with no extra
// claims; challenge handling builds claim-bearing requests separately.
this.defaultTokenSupplier = () -> credential.getToken(new TokenRequestContext().addScopes(scopes));
this.cache = new AccessTokenCache(defaultTokenSupplier);
}
/**
 * Executed before sending the initial request; authenticates the request by
 * attaching a bearer token acquired for the default (non-challenge) scopes.
 *
 * @param context The request context.
 * @return A {@link Mono} containing {@link Void} that completes once the
 * Authorization header has been set.
 */
public Mono<Void> onBeforeRequest(HttpPipelineCallContext context) {
return authenticateRequest(context, defaultTokenSupplier, false);
}
/**
 * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication
 * challenge header is received after the initial request.
 *
 * @param context The request context.
 * @param response The Http Response containing the authentication challenge header.
 * @return A {@link Mono} emitting {@code true} when a claims challenge was found and the request was
 * re-authenticated (so the request should be retried with the new token), {@code false} otherwise.
 */
public Mono<Boolean> onChallenge(HttpPipelineCallContext context, HttpResponse response) {
    String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
    if (response.getStatusCode() == 401 && authHeader != null) {
        List<AuthenticationChallenge> challenges = parseChallenges(authHeader);
        for (AuthenticationChallenge authenticationChallenge : challenges) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) {
                // Claims arrive base64url-encoded in the challenge parameters.
                String claims = new String(Base64.getUrlDecoder()
                    .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8);
                // BUGFIX: authenticateRequest returns Mono<Void>, which completes
                // without emitting a value, so the previous
                // .flatMap(b -> Mono.just(true)) mapper never ran and this method
                // completed EMPTY instead of emitting true (making process()
                // swallow the response). then(...) emits true once
                // re-authentication has completed.
                return authenticateRequest(context,
                    () -> credential.getToken(new TokenRequestContext()
                        .addScopes(scopes).setClaims(claims)), true)
                    .then(Mono.just(true));
            }
        }
    }
    return Mono.just(false);
}
// Authenticates the outgoing request, forwards it, and retries exactly once
// when a handleable claims challenge comes back on a 401.
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
// Bearer tokens must never be sent over plain HTTP.
if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) {
return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme"));
}
// Clone up-front: a pipeline "next" policy is single-use, and the clone is
// needed to replay the request after a successful challenge.
HttpPipelineNextPolicy nextPolicy = next.clone();
return onBeforeRequest(context)
.then(next.process())
.flatMap(httpResponse -> {
String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE);
if (httpResponse.getStatusCode() == 401 && authHeader != null) {
// A 401 with WWW-Authenticate may carry a claims challenge; if it was
// handled, retry the request once with the refreshed token.
// NOTE(review): if onChallenge ever completes empty, this flatMap
// propagates an empty Mono and the caller receives no response —
// confirm onChallenge always emits a Boolean.
return onChallenge(context, httpResponse).flatMap(retry -> {
if (retry) {
return nextPolicy.process();
} else {
return Mono.just(httpResponse);
}
});
}
return Mono.just(httpResponse);
});
}
/**
 * Returns the {@link AccessTokenCache} that stores cached access tokens and
 * encapsulates the logic to retrieve and refresh them.
 *
 * @return the {@link AccessTokenCache}
 */
public AccessTokenCache getTokenCache() {
    return this.cache;
}
// Retrieves a token (cached unless forceTokenRefresh is set) and stamps the
// "Authorization: Bearer <token>" header onto the outgoing request.
private Mono<Void> authenticateRequest(HttpPipelineCallContext context, Supplier<Mono<AccessToken>> tokenSupplier,
boolean forceTokenRefresh) {
return cache.getToken(tokenSupplier, forceTokenRefresh)
.flatMap(token -> {
context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken());
// Mono.empty() ends the chain with a completion signal only (no onNext),
// so the returned Mono<Void> never emits a value.
return Mono.empty();
});
}
Map<String, String> parseChallengeParams(String challengeParams) {
    // Extracts every key="value" pair from a single challenge's parameter
    // list into a map keyed by parameter name.
    Map<String, String> params = new HashMap<>();
    Matcher paramMatcher = AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams);
    while (paramMatcher.find()) {
        params.put(paramMatcher.group(1), paramMatcher.group(2));
    }
    return params;
}
} | class BearerTokenAuthenticationChallengePolicy implements HttpPipelinePolicy {
private static final String AUTHORIZATION_HEADER = "Authorization";
private static final String BEARER = "Bearer";
public static final Pattern AUTHENTICATION_CHALLENGE_PATTERN =
Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?");
public static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN =
Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+");
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
public static final String CLAIMS_PARAMETER = "claims";
private final TokenCredential credential;
private final String[] scopes;
private final Supplier<Mono<AccessToken>> defaultTokenSupplier;
private final AccessTokenCache cache;
/**
* Creates BearerTokenAuthenticationChallengePolicy.
*
* @param credential the token credential to authenticate the request
* @param scopes the scopes of authentication the credential should get token for
*/
public BearerTokenAuthenticationChallengePolicy(TokenCredential credential, String... scopes) {
Objects.requireNonNull(credential);
this.credential = credential;
this.scopes = scopes;
this.defaultTokenSupplier = () -> credential.getToken(new TokenRequestContext().addScopes(scopes));
this.cache = new AccessTokenCache(defaultTokenSupplier);
}
/**
*
* Executed before sending the initial request and authenticates the request.
*
* @param context The request context.
* @return A {@link Mono} containing {@link Void}
*/
public Mono<Void> onBeforeRequest(HttpPipelineCallContext context) {
return authenticateRequest(context, defaultTokenSupplier, false);
}
/**
 * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication
 * challenge header is received after the initial request.
 *
 * @param context The request context.
 * @param response The Http Response containing the authentication challenge header.
 * @return A {@link Mono} emitting {@code true} when a claims challenge was found and the request was
 * re-authenticated (so the request should be retried with the new token), {@code false} otherwise.
 */
public Mono<Boolean> onChallenge(HttpPipelineCallContext context, HttpResponse response) {
    String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
    if (response.getStatusCode() == 401 && authHeader != null) {
        List<AuthenticationChallenge> challenges = parseChallenges(authHeader);
        for (AuthenticationChallenge authenticationChallenge : challenges) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) {
                // Claims arrive base64url-encoded in the challenge parameters.
                String claims = new String(Base64.getUrlDecoder()
                    .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8);
                // BUGFIX: authenticateRequest returns Mono<Void>, which completes
                // without emitting a value, so .flatMap(b -> Mono.just(true)) never
                // ran and this method completed EMPTY instead of emitting true.
                // then(...) emits true once re-authentication has completed.
                return authenticateRequest(context,
                    () -> credential.getToken(new TokenRequestContext()
                        .addScopes(scopes).setClaims(claims)), true)
                    .then(Mono.just(true));
            }
        }
    }
    return Mono.just(false);
}
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) {
return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme"));
}
HttpPipelineNextPolicy nextPolicy = next.clone();
return onBeforeRequest(context)
.then(next.process())
.flatMap(httpResponse -> {
String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE);
if (httpResponse.getStatusCode() == 401 && authHeader != null) {
return onChallenge(context, httpResponse).flatMap(retry -> {
if (retry) {
return nextPolicy.process();
} else {
return Mono.just(httpResponse);
}
});
}
return Mono.just(httpResponse);
});
}
/**
* Get the {@link AccessTokenCache} holding the cached access tokens and the logic to retrieve and refresh
* access tokens.
*
* @return the {@link AccessTokenCache}
*/
public AccessTokenCache getTokenCache() {
return cache;
}
private Mono<Void> authenticateRequest(HttpPipelineCallContext context, Supplier<Mono<AccessToken>> tokenSupplier,
boolean forceTokenRefresh) {
return cache.getToken(tokenSupplier, forceTokenRefresh)
.flatMap(token -> {
context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken());
return Mono.empty();
});
}
Map<String, String> parseChallengeParams(String challengeParams) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams);
Map<String, String> challengeParameters = new HashMap<>();
while (matcher.find()) {
challengeParameters.put(matcher.group(1), matcher.group(2));
}
return challengeParameters;
}
} |
This could be utilized in the next preview release; I will try this out. | List<AuthenticationChallenge> parseChallenges(String header) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header);
List<AuthenticationChallenge> challenges = new ArrayList<>();
while (matcher.find()) {
challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2)));
}
return challenges;
} | Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); | List<AuthenticationChallenge> parseChallenges(String header) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header);
List<AuthenticationChallenge> challenges = new ArrayList<>();
while (matcher.find()) {
challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2)));
}
return challenges;
} | class BearerTokenAuthenticationChallengePolicy implements HttpPipelinePolicy {
private static final String AUTHORIZATION_HEADER = "Authorization";
private static final String BEARER = "Bearer";
public static final Pattern AUTHENTICATION_CHALLENGE_PATTERN =
Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?");
public static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN =
Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+");
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
public static final String CLAIMS_PARAMETER = "claims";
private final TokenCredential credential;
private final String[] scopes;
private final Supplier<Mono<AccessToken>> defaultTokenSupplier;
private final AccessTokenCache cache;
/**
* Creates BearerTokenAuthenticationChallengePolicy.
*
* @param credential the token credential to authenticate the request
* @param scopes the scopes of authentication the credential should get token for
*/
public BearerTokenAuthenticationChallengePolicy(TokenCredential credential, String... scopes) {
Objects.requireNonNull(credential);
this.credential = credential;
this.scopes = scopes;
this.defaultTokenSupplier = () -> credential.getToken(new TokenRequestContext().addScopes(scopes));
this.cache = new AccessTokenCache(defaultTokenSupplier);
}
/**
*
* Executed before sending the initial request and authenticates the request.
*
* @param context The request context.
* @return A {@link Mono} containing {@link Void}
*/
public Mono<Void> onBeforeRequest(HttpPipelineCallContext context) {
return authenticateRequest(context, defaultTokenSupplier, false);
}
/**
 * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication
 * challenge header is received after the initial request.
 *
 * @param context The request context.
 * @param response The Http Response containing the authentication challenge header.
 * @return A {@link Mono} emitting {@code true} when a claims challenge was found and the request was
 * re-authenticated (so the request should be retried with the new token), {@code false} otherwise.
 */
public Mono<Boolean> onChallenge(HttpPipelineCallContext context, HttpResponse response) {
    String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
    if (response.getStatusCode() == 401 && authHeader != null) {
        List<AuthenticationChallenge> challenges = parseChallenges(authHeader);
        for (AuthenticationChallenge authenticationChallenge : challenges) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) {
                // Claims arrive base64url-encoded in the challenge parameters.
                String claims = new String(Base64.getUrlDecoder()
                    .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8);
                // BUGFIX: authenticateRequest returns Mono<Void>, which completes
                // without emitting a value, so .flatMap(b -> Mono.just(true)) never
                // ran and this method completed EMPTY instead of emitting true.
                // then(...) emits true once re-authentication has completed.
                return authenticateRequest(context,
                    () -> credential.getToken(new TokenRequestContext()
                        .addScopes(scopes).setClaims(claims)), true)
                    .then(Mono.just(true));
            }
        }
    }
    return Mono.just(false);
}
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) {
return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme"));
}
HttpPipelineNextPolicy nextPolicy = next.clone();
return onBeforeRequest(context)
.then(next.process())
.flatMap(httpResponse -> {
String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE);
if (httpResponse.getStatusCode() == 401 && authHeader != null) {
return onChallenge(context, httpResponse).flatMap(retry -> {
if (retry) {
return nextPolicy.process();
} else {
return Mono.just(httpResponse);
}
});
}
return Mono.just(httpResponse);
});
}
/**
* Get the {@link AccessTokenCache} holding the cached access tokens and the logic to retrieve and refresh
* access tokens.
*
* @return the {@link AccessTokenCache}
*/
public AccessTokenCache getTokenCache() {
return cache;
}
private Mono<Void> authenticateRequest(HttpPipelineCallContext context, Supplier<Mono<AccessToken>> tokenSupplier,
boolean forceTokenRefresh) {
return cache.getToken(tokenSupplier, forceTokenRefresh)
.flatMap(token -> {
context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken());
return Mono.empty();
});
}
Map<String, String> parseChallengeParams(String challengeParams) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams);
Map<String, String> challengeParameters = new HashMap<>();
while (matcher.find()) {
challengeParameters.put(matcher.group(1), matcher.group(2));
}
return challengeParameters;
}
} | class BearerTokenAuthenticationChallengePolicy implements HttpPipelinePolicy {
private static final String AUTHORIZATION_HEADER = "Authorization";
private static final String BEARER = "Bearer";
public static final Pattern AUTHENTICATION_CHALLENGE_PATTERN =
Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?");
public static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN =
Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+");
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
public static final String CLAIMS_PARAMETER = "claims";
private final TokenCredential credential;
private final String[] scopes;
private final Supplier<Mono<AccessToken>> defaultTokenSupplier;
private final AccessTokenCache cache;
/**
* Creates BearerTokenAuthenticationChallengePolicy.
*
* @param credential the token credential to authenticate the request
* @param scopes the scopes of authentication the credential should get token for
*/
public BearerTokenAuthenticationChallengePolicy(TokenCredential credential, String... scopes) {
Objects.requireNonNull(credential);
this.credential = credential;
this.scopes = scopes;
this.defaultTokenSupplier = () -> credential.getToken(new TokenRequestContext().addScopes(scopes));
this.cache = new AccessTokenCache(defaultTokenSupplier);
}
/**
*
* Executed before sending the initial request and authenticates the request.
*
* @param context The request context.
* @return A {@link Mono} containing {@link Void}
*/
public Mono<Void> onBeforeRequest(HttpPipelineCallContext context) {
return authenticateRequest(context, defaultTokenSupplier, false);
}
/**
 * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication
 * challenge header is received after the initial request.
 *
 * @param context The request context.
 * @param response The Http Response containing the authentication challenge header.
 * @return A {@link Mono} emitting {@code true} when a claims challenge was found and the request was
 * re-authenticated (so the request should be retried with the new token), {@code false} otherwise.
 */
public Mono<Boolean> onChallenge(HttpPipelineCallContext context, HttpResponse response) {
    String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
    if (response.getStatusCode() == 401 && authHeader != null) {
        List<AuthenticationChallenge> challenges = parseChallenges(authHeader);
        for (AuthenticationChallenge authenticationChallenge : challenges) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) {
                // Claims arrive base64url-encoded in the challenge parameters.
                String claims = new String(Base64.getUrlDecoder()
                    .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8);
                // BUGFIX: authenticateRequest returns Mono<Void>, which completes
                // without emitting a value, so .flatMap(b -> Mono.just(true)) never
                // ran and this method completed EMPTY instead of emitting true.
                // then(...) emits true once re-authentication has completed.
                return authenticateRequest(context,
                    () -> credential.getToken(new TokenRequestContext()
                        .addScopes(scopes).setClaims(claims)), true)
                    .then(Mono.just(true));
            }
        }
    }
    return Mono.just(false);
}
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) {
if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) {
return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme"));
}
HttpPipelineNextPolicy nextPolicy = next.clone();
return onBeforeRequest(context)
.then(next.process())
.flatMap(httpResponse -> {
String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE);
if (httpResponse.getStatusCode() == 401 && authHeader != null) {
return onChallenge(context, httpResponse).flatMap(retry -> {
if (retry) {
return nextPolicy.process();
} else {
return Mono.just(httpResponse);
}
});
}
return Mono.just(httpResponse);
});
}
/**
* Get the {@link AccessTokenCache} holding the cached access tokens and the logic to retrieve and refresh
* access tokens.
*
* @return the {@link AccessTokenCache}
*/
public AccessTokenCache getTokenCache() {
return cache;
}
private Mono<Void> authenticateRequest(HttpPipelineCallContext context, Supplier<Mono<AccessToken>> tokenSupplier,
boolean forceTokenRefresh) {
return cache.getToken(tokenSupplier, forceTokenRefresh)
.flatMap(token -> {
context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken());
return Mono.empty();
});
}
Map<String, String> parseChallengeParams(String challengeParams) {
Matcher matcher = AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams);
Map<String, String> challengeParameters = new HashMap<>();
while (matcher.find()) {
challengeParameters.put(matcher.group(1), matcher.group(2));
}
return challengeParameters;
}
} |
nit: let's add an import at the top | public SmsClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
com.azure.communication.common.implementation.CommunicationConnectionString connectionStringObject = new com.azure.communication.common.implementation.CommunicationConnectionString(connectionString);
String endpoint = connectionStringObject.getEndpoint();
String accessKey = connectionStringObject.getAccessKey();
this
.endpoint(endpoint)
.accessKey(accessKey);
return this;
} | com.azure.communication.common.implementation.CommunicationConnectionString connectionStringObject = new com.azure.communication.common.implementation.CommunicationConnectionString(connectionString); | public SmsClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
CommunicationConnectionString connectionStringObject = new CommunicationConnectionString(connectionString);
String endpoint = connectionStringObject.getEndpoint();
String accessKey = connectionStringObject.getAccessKey();
this
.endpoint(endpoint)
.accessKey(accessKey);
return this;
} | class SmsClientBuilder {
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private static final String APP_CONFIG_PROPERTIES = "azure-communication-sms.properties";
private final ClientLogger logger = new ClientLogger(SmsClientBuilder.class);
private String endpoint;
private CommunicationClientCredential accessKeyCredential;
private TokenCredential tokenCredential;
private HttpClient httpClient;
private HttpPipeline pipeline;
private final Configuration configuration = Configuration.getGlobalConfiguration().clone();
private final Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
private final HttpLogOptions httpLogOptions = new HttpLogOptions();
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<HttpPipelinePolicy>();
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return SmsClientBuilder
*/
public SmsClientBuilder endpoint(String endpoint) {
this.endpoint = endpoint;
return this;
}
/**
* Set endpoint of the service
*
* @param pipeline HttpPipeline to use, if a pipeline is not
* supplied, the credential and httpClient fields must be set
* @return SmsClientBuilder
*/
public SmsClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = pipeline;
return this;
}
/**
 * Sets the access key used to build the {@link CommunicationClientCredential}
 * that signs outgoing requests.
 *
 * @param accessKey access key for initializing CommunicationClientCredential
 * @return SmsClientBuilder
 * @throws NullPointerException if {@code accessKey} is null
 */
public SmsClientBuilder accessKey(String accessKey) {
    Objects.requireNonNull(accessKey, "'accessKey' cannot be null.");
    this.accessKeyCredential = new CommunicationClientCredential(accessKey);
    return this;
}
/**
* Sets the {@link TokenCredential} used to authenticate HTTP requests.
*
* @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests.
* @return The updated {@link SmsClientBuilder} object.
* @throws NullPointerException If {@code tokenCredential} is null.
*/
public SmsClientBuilder credential(TokenCredential tokenCredential) {
this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
return this;
}
/**
* Set endpoint and credential to use
*
* @param connectionString connection string for setting endpoint and initalizing CommunicationClientCredential
* @return SmsClientBuilder
*/
/**
* Set httpClient to use
*
* @param httpClient httpClient to use, overridden by the pipeline
* field.
* @return SmsClientBuilder
*/
public SmsClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = httpClient;
return this;
}
/**
* Apply additional HttpPipelinePolicy
*
* @param customPolicy HttpPipelinePolicy object to be applied after
* AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy
* @return SmsClientBuilder
*/
public SmsClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
this.customPolicies.add(customPolicy);
return this;
}
/**
* Create asynchronous client applying HMACAuthenticationPolicy, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return SmsAsyncClient instance
*/
public SmsAsyncClient buildAsyncClient() {
return new SmsAsyncClient(createServiceImpl());
}
/**
* Create synchronous client applying HmacAuthenticationPolicy, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return SmsClient instance
*/
public SmsClient buildClient() {
return new SmsClient(buildAsyncClient());
}
private AzureCommunicationSMSServiceImpl createServiceImpl() {
    // The endpoint is always required; when no prebuilt pipeline was given,
    // an HttpClient is required so one can be assembled here.
    Objects.requireNonNull(endpoint);
    HttpPipeline builderPipeline = this.pipeline;
    if (builderPipeline == null) {
        Objects.requireNonNull(httpClient);
        HttpPipelinePolicy[] customPolicyArray = null;
        if (!customPolicies.isEmpty()) {
            customPolicyArray = customPolicies.toArray(new HttpPipelinePolicy[customPolicies.size()]);
        }
        builderPipeline = createHttpPipeline(httpClient,
            createHttpPipelineAuthPolicy(),
            customPolicyArray);
    }
    AzureCommunicationSMSServiceImplBuilder clientBuilder = new AzureCommunicationSMSServiceImplBuilder();
    clientBuilder.endpoint(endpoint)
        .pipeline(builderPipeline);
    return clientBuilder.buildClient();
}
// Chooses the authentication policy for the pipeline: exactly one of the
// token credential or the access key must be configured.
private HttpPipelinePolicy createHttpPipelineAuthPolicy() {
if (this.tokenCredential != null && this.accessKeyCredential != null) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Both 'credential' and 'accessKey' are set. Just one may be used."));
}
if (this.tokenCredential != null) {
// NOTE(review): the scope string literal below appears truncated in this
// copy of the source ("https: ...) — confirm the full AAD scope URL
// against the original file before relying on this snippet.
return new BearerTokenAuthenticationPolicy(
this.tokenCredential, "https:
} else if (this.accessKeyCredential != null) {
return new HmacAuthenticationPolicy(this.accessKeyCredential);
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
}
// Assembles the pipeline: slot 0 = auth policy, slots 1-3 = the required
// policies (user agent, retry, cookie), slots 4+ = caller-supplied policies.
private HttpPipeline createHttpPipeline(HttpClient httpClient,
HttpPipelinePolicy authorizationPolicy,
HttpPipelinePolicy[] additionalPolicies) {
HttpPipelinePolicy[] policies = new HttpPipelinePolicy[4];
if (additionalPolicies != null) {
// Re-size to make room for the custom policies after the 4 required ones.
policies = new HttpPipelinePolicy[4 + additionalPolicies.length];
applyAdditionalPolicies(policies, additionalPolicies);
}
policies[0] = authorizationPolicy;
applyRequirePolicies(policies);
return new HttpPipelineBuilder()
.policies(policies)
.httpClient(httpClient)
.build();
}
// Fills pipeline slots 1-3 with the always-required policies; slot 0 is
// reserved for the authentication policy assigned by the caller.
private void applyRequirePolicies(HttpPipelinePolicy[] policies) {
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
policies[1] = new UserAgentPolicy(httpLogOptions.getApplicationId(), clientName, clientVersion, configuration);
policies[2] = new RetryPolicy();
policies[3] = new CookiePolicy();
}
private void applyAdditionalPolicies(HttpPipelinePolicy[] policies, HttpPipelinePolicy[] customPolicies) {
    // Custom policies occupy the slots after the four required ones
    // (auth, user agent, retry, cookie).
    System.arraycopy(customPolicies, 0, policies, 4, customPolicies.length);
}
} | class SmsClientBuilder {
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private static final String APP_CONFIG_PROPERTIES = "azure-communication-sms.properties";
private final ClientLogger logger = new ClientLogger(SmsClientBuilder.class);
private String endpoint;
private AzureKeyCredential accessKeyCredential;
private TokenCredential tokenCredential;
private HttpClient httpClient;
private HttpPipeline pipeline;
private final Configuration configuration = Configuration.getGlobalConfiguration().clone();
private final Map<String, String> properties = CoreUtils.getProperties(APP_CONFIG_PROPERTIES);
private final HttpLogOptions httpLogOptions = new HttpLogOptions();
private final List<HttpPipelinePolicy> customPolicies = new ArrayList<HttpPipelinePolicy>();
/**
* Set endpoint of the service
*
* @param endpoint url of the service
* @return SmsClientBuilder
*/
public SmsClientBuilder endpoint(String endpoint) {
this.endpoint = endpoint;
return this;
}
/**
* Set endpoint of the service
*
* @param pipeline HttpPipeline to use, if a pipeline is not
* supplied, the credential and httpClient fields must be set
* @return SmsClientBuilder
*/
public SmsClientBuilder pipeline(HttpPipeline pipeline) {
this.pipeline = pipeline;
return this;
}
/**
* Set accessKeyCredential to use
*
* @param accessKey access key for initalizing AzureKeyCredential
* @return SmsClientBuilder
*/
public SmsClientBuilder accessKey(String accessKey) {
Objects.requireNonNull(accessKey, "'accessKey' cannot be null.");
this.accessKeyCredential = new AzureKeyCredential(accessKey);
return this;
}
/**
* Sets the {@link TokenCredential} used to authenticate HTTP requests.
*
* @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests.
* @return The updated {@link SmsClientBuilder} object.
* @throws NullPointerException If {@code tokenCredential} is null.
*/
public SmsClientBuilder credential(TokenCredential tokenCredential) {
this.tokenCredential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
return this;
}
/**
* Set endpoint and credential to use
*
* @param connectionString connection string for setting endpoint and initalizing AzureKeyCredential
* @return SmsClientBuilder
*/
/**
* Set httpClient to use
*
* @param httpClient httpClient to use, overridden by the pipeline
* field.
* @return SmsClientBuilder
*/
public SmsClientBuilder httpClient(HttpClient httpClient) {
this.httpClient = httpClient;
return this;
}
/**
* Apply additional HttpPipelinePolicy
*
* @param customPolicy HttpPipelinePolicy object to be applied after
* AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy
* @return SmsClientBuilder
*/
public SmsClientBuilder addPolicy(HttpPipelinePolicy customPolicy) {
this.customPolicies.add(customPolicy);
return this;
}
/**
* Create asynchronous client applying HMACAuthenticationPolicy, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return SmsAsyncClient instance
*/
public SmsAsyncClient buildAsyncClient() {
// Wraps the generated service implementation; endpoint/credential validation
// happens inside createServiceImpl.
return new SmsAsyncClient(createServiceImpl());
}
/**
* Create synchronous client applying HmacAuthenticationPolicy, UserAgentPolicy,
* RetryPolicy, and CookiePolicy.
* Additional HttpPolicies specified by additionalPolicies will be applied after them
*
* @return SmsClient instance
*/
public SmsClient buildClient() {
// The synchronous client delegates to a freshly built async client.
return new SmsClient(buildAsyncClient());
}
/*
 * Builds the generated service implementation. An explicitly supplied pipeline wins;
 * otherwise one is assembled from httpClient, the auth policy, and any custom policies.
 * Throws NullPointerException when endpoint is unset, or when httpClient is unset and
 * no pipeline was supplied (same contract as before, now with messages).
 */
private AzureCommunicationSMSServiceImpl createServiceImpl() {
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    HttpPipeline builderPipeline = this.pipeline;
    if (builderPipeline == null) {
        // httpClient is only required when we have to build the pipeline ourselves.
        Objects.requireNonNull(httpClient, "'httpClient' cannot be null when no pipeline is set.");
        // toArray(new T[0]) sizes the array for us; null still means "no custom policies".
        HttpPipelinePolicy[] customPolicyArray = customPolicies.isEmpty()
            ? null
            : customPolicies.toArray(new HttpPipelinePolicy[0]);
        builderPipeline = createHttpPipeline(httpClient,
            createHttpPipelineAuthPolicy(),
            customPolicyArray);
    }
    return new AzureCommunicationSMSServiceImplBuilder()
        .endpoint(endpoint)
        .pipeline(builderPipeline)
        .buildClient();
}
private HttpPipelinePolicy createHttpPipelineAuthPolicy() {
// Exactly one credential type may be configured: bearer token OR HMAC access key.
if (this.tokenCredential != null && this.accessKeyCredential != null) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Both 'credential' and 'accessKey' are set. Just one may be used."));
}
if (this.tokenCredential != null) {
// NOTE(review): the scope string below is an unterminated literal — it appears the
// "https://..." tail was lost (everything after "//" stripped during extraction).
// Restore the full scope from the upstream source before building.
return new BearerTokenAuthenticationPolicy(
this.tokenCredential, "https:
} else if (this.accessKeyCredential != null) {
return new HmacAuthenticationPolicy(this.accessKeyCredential);
} else {
// Neither credential was configured — fail fast with a descriptive error.
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
}
/*
 * Assembles the HTTP pipeline: slot 0 = auth policy, slots 1-3 = required policies
 * (user agent, retry, cookie), slots 4+ = caller-supplied custom policies.
 * Fix: the original allocated a size-4 array and then discarded/reallocated it when
 * additional policies were present; the length is now computed once.
 */
private HttpPipeline createHttpPipeline(HttpClient httpClient,
                                        HttpPipelinePolicy authorizationPolicy,
                                        HttpPipelinePolicy[] additionalPolicies) {
    int additionalCount = additionalPolicies == null ? 0 : additionalPolicies.length;
    HttpPipelinePolicy[] policies = new HttpPipelinePolicy[4 + additionalCount];
    policies[0] = authorizationPolicy;
    applyRequirePolicies(policies);            // fills slots 1-3
    if (additionalCount > 0) {
        applyAdditionalPolicies(policies, additionalPolicies); // fills slots 4+
    }
    return new HttpPipelineBuilder()
        .policies(policies)
        .httpClient(httpClient)
        .build();
}
/*
 * Fills slots 1-3 of the policy array with the always-present policies; slot 0 is
 * reserved for the auth policy supplied by the caller.
 */
private void applyRequirePolicies(HttpPipelinePolicy[] policies) {
    String sdkName = properties.getOrDefault(SDK_NAME, "UnknownName");
    String sdkVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
    int slot = 1;
    policies[slot++] = new UserAgentPolicy(httpLogOptions.getApplicationId(), sdkName, sdkVersion, configuration);
    policies[slot++] = new RetryPolicy();
    policies[slot] = new CookiePolicy();
}
/*
 * Copies the caller-supplied policies into the slots after the four required ones.
 * Idiom fix: System.arraycopy replaces the manual index loop.
 */
private void applyAdditionalPolicies(HttpPipelinePolicy[] policies,
                                     HttpPipelinePolicy[] customPolicies) {
    System.arraycopy(customPolicies, 0, policies, 4, customPolicies.length);
}
} |
// Review note ("make method for this"): extract the repeated remove/add annotation pair below into a helper method.
public void customize(LibraryCustomization customization) {
// The remove/add annotation pair is identical for every generated method; capture it
// once instead of repeating it 17 times (this is the duplication the review flagged).
// Fully qualified Consumer avoids touching the import block.
java.util.function.Consumer<MethodCustomization> swapExceptionType = method -> {
    method.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
    method.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.queue.models.QueueStorageException.class)");
};
PackageCustomization impl = customization.getPackage("com.azure.storage.queue.implementation");
// Queue operations.
ClassCustomization queuesImpl = impl.getClass("QueuesImpl");
swapExceptionType.accept(queuesImpl.getMethod("create"));
swapExceptionType.accept(queuesImpl.getMethod("delete"));
swapExceptionType.accept(queuesImpl.getMethod("getProperties"));
swapExceptionType.accept(queuesImpl.getMethod("setMetadata"));
swapExceptionType.accept(queuesImpl.getMethod("getAccessPolicy"));
swapExceptionType.accept(queuesImpl.getMethod("setAccessPolicy"));
// Message-id operations.
ClassCustomization messageIdsImpl = impl.getClass("MessageIdsImpl");
swapExceptionType.accept(messageIdsImpl.getMethod("update"));
swapExceptionType.accept(messageIdsImpl.getMethod("delete"));
// Message operations.
ClassCustomization messagesImpl = impl.getClass("MessagesImpl");
swapExceptionType.accept(messagesImpl.getMethod("dequeue"));
swapExceptionType.accept(messagesImpl.getMethod("clear"));
swapExceptionType.accept(messagesImpl.getMethod("enqueue"));
swapExceptionType.accept(messagesImpl.getMethod("peek"));
// Service-level operations.
ClassCustomization servicesImpl = impl.getClass("ServicesImpl");
swapExceptionType.accept(servicesImpl.getMethod("setProperties"));
swapExceptionType.accept(servicesImpl.getMethod("getProperties"));
swapExceptionType.accept(servicesImpl.getMethod("getStatistics"));
swapExceptionType.accept(servicesImpl.getMethod("listQueuesSegment"));
swapExceptionType.accept(servicesImpl.getMethod("listQueuesSegmentNext"));
// Rename the generated "Metrics" JSON property to the distinct Hour/Minute names.
PackageCustomization models = customization.getPackage("com.azure.storage.queue.models");
ClassCustomization queueServiceProperties = models.getClass("QueueServiceProperties");
PropertyCustomization hourMetrics = queueServiceProperties.getProperty("hourMetrics");
hourMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
hourMetrics.addAnnotation("@JsonProperty(value = \"HourMetrics\")");
PropertyCustomization minuteMetrics = queueServiceProperties.getProperty("minuteMetrics");
minuteMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
minuteMetrics.addAnnotation("@JsonProperty(value = \"MinuteMetrics\")");
} | create.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)"); | public void customize(LibraryCustomization customization) {
PackageCustomization impl = customization.getPackage("com.azure.storage.queue.implementation");
// Swap the generated StorageErrorException mapping for the public QueueStorageException
// on every implementation method (see modifyUnexpectedResponseExceptionType).
ClassCustomization queuesImpl = impl.getClass("QueuesImpl");
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("create"));
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("delete"));
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("setMetadata"));
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("getAccessPolicy"));
modifyUnexpectedResponseExceptionType(queuesImpl.getMethod("setAccessPolicy"));
ClassCustomization messageIdsImpl = impl.getClass("MessageIdsImpl");
modifyUnexpectedResponseExceptionType(messageIdsImpl.getMethod("update"));
modifyUnexpectedResponseExceptionType(messageIdsImpl.getMethod("delete"));
ClassCustomization messagesImpl = impl.getClass("MessagesImpl");
modifyUnexpectedResponseExceptionType(messagesImpl.getMethod("dequeue"));
modifyUnexpectedResponseExceptionType(messagesImpl.getMethod("clear"));
modifyUnexpectedResponseExceptionType(messagesImpl.getMethod("enqueue"));
modifyUnexpectedResponseExceptionType(messagesImpl.getMethod("peek"));
ClassCustomization servicesImpl = impl.getClass("ServicesImpl");
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("setProperties"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("getStatistics"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("listQueuesSegment"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("listQueuesSegmentNext"));
// Rename the generated "Metrics" JSON property to the distinct Hour/Minute names.
PackageCustomization models = customization.getPackage("com.azure.storage.queue.models");
ClassCustomization queueServiceProperties = models.getClass("QueueServiceProperties");
PropertyCustomization hourMetrics = queueServiceProperties.getProperty("hourMetrics");
hourMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
hourMetrics.addAnnotation("@JsonProperty(value = \"HourMetrics\")");
PropertyCustomization minuteMetrics = queueServiceProperties.getProperty("minuteMetrics");
minuteMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
minuteMetrics.addAnnotation("@JsonProperty(value = \"MinuteMetrics\")");
} | class QueueStorageCustomization extends Customization {
@Override
} | class QueueStorageCustomization extends Customization {
@Override
// Replaces the generated StorageErrorException mapping on one method with the
// public com.azure.storage.queue.models.QueueStorageException mapping.
private void modifyUnexpectedResponseExceptionType(MethodCustomization method) {
method.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
method.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.queue.models.QueueStorageException.class)");
}
} |
// Review note: this test will no longer be applicable once users cannot set the `top` and `skip` values.
public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
// Start the long-running analyze operation, block until done, then validate the
// paged result (a 20-item page followed by a 2-item page).
analyzeTasksPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<AnalyzeTasksResult>>
syncPoller = client.beginAnalyzeTasks(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<AnalyzeTasksResult> result = syncPoller.getFinalResult();
validateAnalyzeTasksResultList(options.isIncludeStatistics(),
getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
result.stream().collect(Collectors.toList()));
// 22 is the runner's second argument — presumably the document count driving the
// 20+2 paging above; confirm against analyzeTasksPaginationRunner.
}, 22);
} | }, 22); | public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
// Start the long-running analyze operation, block until done, then validate the
// paged result (a 20-item page followed by a 2-item page).
analyzeTasksPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<AnalyzeTasksResult>>
syncPoller = client.beginAnalyzeTasks(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<AnalyzeTasksResult> result = syncPoller.getFinalResult();
validateAnalyzeTasksResultList(options.isIncludeStatistics(),
getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
result.stream().collect(Collectors.toList()));
// 22 is the runner's second argument — presumably the document count driving the
// 20+2 paging above; confirm against analyzeTasksPaginationRunner.
}, 22);
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
private TextAnalyticsClient getTextAnalyticsClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguageResultCollectionWithResponse(true,
getExpectedBatchDetectedLanguages(), 200,
client.detectLanguageBatchWithResponse(inputs, options, Context.NONE)));
}
/**
* Test Detect batch of documents languages.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) -> validateDetectLanguageResultCollectionWithResponse(false,
getExpectedBatchDetectedLanguages(), 200,
client.detectLanguageBatchWithResponse(inputs, null, Context.NONE)));
}
/**
* Test detect batch languages for a list of string input with country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguageResultCollection(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguageBatch(inputs, countryHint, null)));
}
/**
* Test detect batch languages for a list of string input with request options
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) -> validateDetectLanguageResultCollection(true,
getExpectedBatchDetectedLanguages(), client.detectLanguageBatch(inputs, null, options)));
}
/**
* Test detect batch languages for a list of string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) -> validateDetectLanguageResultCollection(
false, getExpectedBatchDetectedLanguages(), client.detectLanguageBatch(inputs, null, null)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a document to detect language.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
validatePrimaryLanguage(getDetectedLanguageEnglish(), client.detectLanguage(input)));
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.detectLanguage(input));
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> validatePrimaryLanguage(client.detectLanguage(input), getUnknownDetectedLanguage()));
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectLanguageBatchWithResponse(inputs, options, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.detectLanguageBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
/**
* Verifies that a TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.detectLanguage(input, countryHint));
assertEquals(INVALID_COUNTRY_HINT, exception.getErrorCode());
});
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
validatePrimaryLanguage(getDetectedLanguageSpanish(), client.detectLanguage(input, countryHint)));
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
validatePrimaryLanguage(getDetectedLanguageSpanish(), client.detectLanguage(input, countryHint)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input -> {
final List<CategorizedEntity> entities = client.recognizeEntities(input).stream().collect(Collectors.toList());
validateCategorizedEntities(getCategorizedEntitiesList1(), entities);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.recognizeEntities(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> assertFalse(client.recognizeEntities(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> {
Response<RecognizeEntitiesResultCollection> response = client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE);
response.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
});
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200,
client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200,
client.recognizeEntitiesBatchWithResponse(inputs, options, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, language, null))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, null, options))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document -> {
final PiiEntityCollection entities = client.recognizePiiEntities(document);
validatePiiEntities(getPiiEntitiesList1(), entities.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(document -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class, () ->
client.recognizePiiEntities(document).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(document -> assertFalse(client.recognizePiiEntities(document).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) -> {
Response<RecognizePiiEntitiesResultCollection> response = client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE);
response.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
});
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner(inputs ->
validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200,
client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200,
client.recognizePiiEntitiesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiEntitiesLanguageHintRunner((inputs, language) ->
validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesBatch(inputs, language, null))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) -> {
final PiiEntityCollection entities = client.recognizePiiEntities(document, "en", options);
validatePiiEntities(Arrays.asList(getPiiEntitiesList1().get(1)), entities.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) -> {
final RecognizePiiEntitiesResultCollection response = client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntityOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION));
validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) -> {
final Response<RecognizePiiEntitiesResultCollection> response = client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntityOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION), Context.NONE);
validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input -> {
final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities(input)
.stream().collect(Collectors.toList());
validateLinkedEntity(getLinkedEntitiesList1().get(0), linkedEntities.get(0));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.recognizeLinkedEntities(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input ->
assertFalse(client.recognizeLinkedEntities(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200,
client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200,
client.recognizeLinkedEntitiesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, language, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.recognizeLinkedEntities(document).forEach(linkedEntity ->
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity ->
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
assertEquals("monde",
client.extractKeyPhrases(input).iterator().next()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.extractKeyPhrases(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> assertFalse(client.extractKeyPhrases(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200,
client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200,
client.extractKeyPhrasesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, language, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesWarningRunner(input ->
client.extractKeyPhrases(input).getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesBatchWarningRunner(inputs ->
client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE).getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
/**
* Test analyzing sentiment for a string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input -> {
validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), client.analyzeSentiment(input));
});
}
/**
* Test analyzing sentiment for a string input with default language hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input -> {
final DocumentSentiment analyzeSentimentResult = client.analyzeSentiment(input, null);
validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), analyzeSentimentResult);
});
}
/**
* Test analyzing sentiment for a string input and verifying the result of opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) -> {
final DocumentSentiment analyzeSentimentResult =
client.analyzeSentiment(input, "en", options);
validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), analyzeSentimentResult);
});
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(document -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.analyzeSentiment(document));
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
/**
* Test analyzing sentiment for a faulty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> {
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
TextSentiment.NEUTRAL,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
new SentenceSentiment("@
)), null);
validateAnalyzedSentiment(false, expectedDocumentSentiment, client.analyzeSentiment(input));
});
}
/**
* Test analyzing sentiment for a duplicate ID list.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions(), Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
* code, 'en'.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentStringInputRunner(inputs ->
validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions())));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and given a language code.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null and given a language code.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentLanguageHintRunner((inputs, language) ->
validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions())));
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which to show the request statistics only and verify the analyzed sentiment result.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false))));
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options));
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options)));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentRunner(inputs ->
validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null, Context.NONE)));
}
/**
* Verify that we can get statistics on the collection result when given a batch of
* TextDocumentInput documents with TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions includes request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, requestOptions, Context.NONE)));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null, Context.NONE)));
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes request statistics but not opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false), Context.NONE)));
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining but not request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options, Context.NONE));
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options, Context.NONE)));
}
/**
* Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareTaskResults = syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForSinglePage(),
healthcareTaskResults.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareTaskResults = syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
healthcareTaskResults.stream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPaginationWithTopAndSkip(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options.setSkip(2).setTop(4), Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(2, 4, 3),
healthcareEntitiesResultCollectionPagedFlux.stream().collect(Collectors.toList()));
}, 9);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
() -> client.beginAnalyzeHealthcare(documents, null, Context.NONE).getFinalResult());
assertEquals(errorMessage, exception.getMessage());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
PollResponse<TextAnalyticsOperationResult> pollResponse = syncPoller.poll();
client.beginCancelHealthcareTask(pollResponse.getValue().getResultId(), options, Context.NONE);
syncPoller.waitForCompletion();
});
}
@Disabled("enable it once the service error resolved")
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeTasksLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<AnalyzeTasksResult>> syncPoller =
client.beginAnalyzeTasks(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<AnalyzeTasksResult> result = syncPoller.getFinalResult();
validateAnalyzeTasksResultList(options.isIncludeStatistics(),
Arrays.asList(getExpectedAnalyzeTasksResult(
asList(getRecognizeEntitiesResultCollection()),
asList(getRecognizePiiEntitiesResultCollection()),
asList(getExtractKeyPhrasesResultCollection()))),
result.stream().collect(Collectors.toList()));
});
}
@Disabled("enable it once the service error resolved")
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
() -> client.beginAnalyzeTasks(documents, null, Context.NONE)
.getFinalResult());
assertEquals(errorMessage, exception.getMessage());
});
}
}

class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
// Client under test; re-initialized in each test from the parameterized HttpClient/service version.
private TextAnalyticsClient client;

// Builds the synchronous client from the shared async client builder configured with the given
// HTTP client and service version.
private TextAnalyticsClient getTextAnalyticsClient(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguageResultCollectionWithResponse(true,
getExpectedBatchDetectedLanguages(), 200,
client.detectLanguageBatchWithResponse(inputs, options, Context.NONE)));
}
/**
* Test Detect batch of documents languages.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) -> validateDetectLanguageResultCollectionWithResponse(false,
getExpectedBatchDetectedLanguages(), 200,
client.detectLanguageBatchWithResponse(inputs, null, Context.NONE)));
}
/**
* Test detect batch languages for a list of string input with country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguageResultCollection(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguageBatch(inputs, countryHint, null)));
}
/**
* Test detect batch languages for a list of string input with request options
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) -> validateDetectLanguageResultCollection(true,
getExpectedBatchDetectedLanguages(), client.detectLanguageBatch(inputs, null, options)));
}
/**
* Test detect batch languages for a list of string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) -> validateDetectLanguageResultCollection(
false, getExpectedBatchDetectedLanguages(), client.detectLanguageBatch(inputs, null, null)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a document to detect language.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
validatePrimaryLanguage(getDetectedLanguageEnglish(), client.detectLanguage(input)));
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.detectLanguage(input));
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> validatePrimaryLanguage(client.detectLanguage(input), getUnknownDetectedLanguage()));
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectLanguageBatchWithResponse(inputs, options, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.detectLanguageBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
/**
* Verifies that a TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.detectLanguage(input, countryHint));
assertEquals(INVALID_COUNTRY_HINT, exception.getErrorCode());
});
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
validatePrimaryLanguage(getDetectedLanguageSpanish(), client.detectLanguage(input, countryHint)));
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
validatePrimaryLanguage(getDetectedLanguageSpanish(), client.detectLanguage(input, countryHint)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input -> {
final List<CategorizedEntity> entities = client.recognizeEntities(input).stream().collect(Collectors.toList());
validateCategorizedEntities(getCategorizedEntitiesList1(), entities);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.recognizeEntities(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> assertFalse(client.recognizeEntities(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> {
Response<RecognizeEntitiesResultCollection> response = client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE);
response.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
});
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200,
client.recognizeEntitiesBatchWithResponse(inputs, null, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200,
client.recognizeEntitiesBatchWithResponse(inputs, options, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, language, null))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(),
client.recognizeEntitiesBatch(inputs, null, options))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.recognizeEntities(document).forEach(
categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
}),
CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document -> {
final PiiEntityCollection entities = client.recognizePiiEntities(document);
validatePiiEntities(getPiiEntitiesList1(), entities.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(document -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class, () ->
client.recognizePiiEntities(document).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(document -> assertFalse(client.recognizePiiEntities(document).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) -> {
Response<RecognizePiiEntitiesResultCollection> response = client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE);
response.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
});
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner(inputs ->
validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200,
client.recognizePiiEntitiesBatchWithResponse(inputs, null, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200,
client.recognizePiiEntitiesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiEntitiesLanguageHintRunner((inputs, language) ->
validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesBatch(inputs, language, null))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizePiiEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document -> {
final PiiEntityCollection result = client.recognizePiiEntities(document);
result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
});
}, PII_ENTITY_OFFSET_INPUT);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) -> {
final PiiEntityCollection entities = client.recognizePiiEntities(document, "en", options);
validatePiiEntities(Arrays.asList(getPiiEntitiesList1().get(1)), entities.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) -> {
final RecognizePiiEntitiesResultCollection response = client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntityOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION));
validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) -> {
final Response<RecognizePiiEntitiesResultCollection> response = client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntityOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION), Context.NONE);
validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response);
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input -> {
final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities(input)
.stream().collect(Collectors.toList());
validateLinkedEntity(getLinkedEntitiesList1().get(0), linkedEntities.get(0));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.recognizeLinkedEntities(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input ->
assertFalse(client.recognizeLinkedEntities(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200,
client.recognizeLinkedEntitiesBatchWithResponse(inputs, null, Context.NONE))
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200,
client.recognizeLinkedEntitiesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, language, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.recognizeLinkedEntitiesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.recognizeLinkedEntities(document).forEach(linkedEntity ->
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity -> linkedEntity.getMatches().forEach(
linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.recognizeLinkedEntities(document).forEach(
linkedEntity ->
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
})),
LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
assertEquals("monde",
client.extractKeyPhrases(input).iterator().next()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(input -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.extractKeyPhrases(input).iterator().hasNext());
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> assertFalse(client.extractKeyPhrases(input).iterator().hasNext()));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200,
client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200,
client.extractKeyPhrasesBatchWithResponse(inputs, options, Context.NONE)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, null, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, language, null)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesBatch(inputs, null, options)));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesWarningRunner(input ->
client.extractKeyPhrases(input).getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
extractKeyPhrasesBatchWarningRunner(inputs ->
client.extractKeyPhrasesBatchWithResponse(inputs, null, Context.NONE).getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.extractKeyPhrasesBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
/**
* Test analyzing sentiment for a string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input -> {
validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), client.analyzeSentiment(input));
});
}
/**
* Test analyzing sentiment for a string input with default language hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input -> {
final DocumentSentiment analyzeSentimentResult = client.analyzeSentiment(input, null);
validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), analyzeSentimentResult);
});
}
/**
* Test analyzing sentiment for a string input and verifying the result of opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) -> {
final DocumentSentiment analyzeSentimentResult =
client.analyzeSentiment(input, "en", options);
validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), analyzeSentimentResult);
});
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyTextRunner(document -> {
final TextAnalyticsException exception = assertThrows(TextAnalyticsException.class,
() -> client.analyzeSentiment(document));
assertEquals(INVALID_DOCUMENT, exception.getErrorCode());
});
}
/**
* Test analyzing sentiment for a faulty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
faultyTextRunner(input -> {
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
TextSentiment.NEUTRAL,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
new SentenceSentiment("@
)), null);
validateAnalyzedSentiment(false, expectedDocumentSentiment, client.analyzeSentiment(input));
});
}
/**
* Test analyzing sentiment for a duplicate ID list.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentDuplicateIdRunner(inputs -> {
final HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions(), Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatchWithResponse(inputs, null, Context.NONE));
assertEquals(400, httpResponseException.getResponse().getStatusCode());
TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
* code, 'en'.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentStringInputRunner(inputs ->
validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions())));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and given a language code.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null and given a language code.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeSentimentLanguageHintRunner((inputs, language) ->
validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions())));
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which to show the request statistics only and verify the analyzed sentiment result.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false))));
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options));
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(),
client.analyzeSentimentBatch(inputs, null, options)));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentRunner(inputs ->
validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null, Context.NONE)));
}
/**
* Verify that we can get statistics on the collection result when given a batch of
* TextDocumentInput documents with TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsClient
* which TextAnalyticsRequestOptions includes request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, requestOptions, Context.NONE)));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null, Context.NONE)));
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes request statistics but not opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false), Context.NONE)));
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining but not request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options, Context.NONE));
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200,
client.analyzeSentimentBatchWithResponse(inputs, options, Context.NONE)));
}
/**
* Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs -> {
final HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
() -> client.analyzeSentimentBatch(inputs, null, null).stream().findFirst().get());
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
client.analyzeSentiment(document).getSentences().forEach(
sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
}),
SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareTaskResults = syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForSinglePage(),
healthcareTaskResults.stream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareTaskResults = syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
healthcareTaskResults.stream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPaginationWithTopAndSkip(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options.setSkip(2).setTop(4), Context.NONE);
syncPoller.waitForCompletion();
PagedIterable<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(2, 4, 3),
healthcareEntitiesResultCollectionPagedFlux.stream().collect(Collectors.toList()));
}, 9);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
() -> client.beginAnalyzeHealthcare(documents, null, Context.NONE).getFinalResult());
assertEquals(errorMessage, exception.getMessage());
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsClient(httpClient, serviceVersion);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedIterable<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options, Context.NONE);
PollResponse<TextAnalyticsOperationResult> pollResponse = syncPoller.poll();
client.beginCancelHealthcareTask(pollResponse.getValue().getResultId(), options, Context.NONE);
syncPoller.waitForCompletion();
});
}
// Verifies the analyze-tasks long-running operation with options, checking
// the combined entities / PII / key-phrase task results.
@Disabled("enable it once the service error resolved. Issue: 18798")
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils#getTestParameters")
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsClient(httpClient, serviceVersion);
    analyzeTasksLroRunner((documents, options) -> {
        SyncPoller<TextAnalyticsOperationResult, PagedIterable<AnalyzeTasksResult>> syncPoller =
            client.beginAnalyzeTasks(documents, options, Context.NONE);
        syncPoller.waitForCompletion();
        PagedIterable<AnalyzeTasksResult> result = syncPoller.getFinalResult();
        validateAnalyzeTasksResultList(options.isIncludeStatistics(),
            Arrays.asList(getExpectedAnalyzeTasksResult(
                asList(getRecognizeEntitiesResultCollection()),
                asList(getRecognizePiiEntitiesResultCollection()),
                asList(getExtractKeyPhrasesResultCollection()))),
            result.stream().collect(Collectors.toList()));
    });
}

// NOTE(review): the extracted source contained a second, orphaned
// @Disabled/@ParameterizedTest/@MethodSource annotation stack here with no
// method body (non-repeatable annotations cannot be stacked twice on one
// method, so the file would not compile). A test method appears to have been
// lost during extraction — restore it from source control.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils#getTestParameters")
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsClient(httpClient, serviceVersion);
    emptyListRunner((documents, errorMessage) -> {
        final IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
            () -> client.beginAnalyzeTasks(documents, null, Context.NONE)
                .getFinalResult());
        assertEquals(errorMessage, exception.getMessage());
    });
}
}
Extract a private helper method for this repeated remove/add annotation pattern — every method customization below duplicates the same two calls. | public void customize(LibraryCustomization customization) {
PackageCustomization implementation = customization.getPackage("com.azure.storage.file.share.implementation");
ClassCustomization directoriesImpl = implementation.getClass("DirectoriesImpl");
MethodCustomization create = directoriesImpl.getMethod("create");
create.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
create.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getProperties = directoriesImpl.getMethod("getProperties");
getProperties.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getProperties.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization delete = directoriesImpl.getMethod("delete");
delete.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
delete.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setProperties = directoriesImpl.getMethod("setProperties");
setProperties.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setProperties.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setMetadata = directoriesImpl.getMethod("setMetadata");
setMetadata.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setMetadata.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization listFilesAndDirectoriesSegment = directoriesImpl.getMethod("listFilesAndDirectoriesSegment");
listFilesAndDirectoriesSegment.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
listFilesAndDirectoriesSegment.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization listHandles = directoriesImpl.getMethod("listHandles");
listHandles.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
listHandles.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization forceCloseHandles = directoriesImpl.getMethod("forceCloseHandles");
forceCloseHandles.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
forceCloseHandles.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
ClassCustomization filesImpl = implementation.getClass("FilesImpl");
MethodCustomization create1 = filesImpl.getMethod("create");
create1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
create1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization download = filesImpl.getMethod("download");
download.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
download.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getProperties1 = filesImpl.getMethod("getProperties");
getProperties1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getProperties1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization delete1 = filesImpl.getMethod("delete");
delete1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
delete1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setHttpHeaders = filesImpl.getMethod("setHttpHeaders");
setHttpHeaders.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setHttpHeaders.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setMetadata1 = filesImpl.getMethod("setMetadata");
setMetadata1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setMetadata1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization uploadRange = filesImpl.getMethod("uploadRange");
uploadRange.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
uploadRange.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization uploadRangeFromURL = filesImpl.getMethod("uploadRangeFromURL");
uploadRangeFromURL.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
uploadRangeFromURL.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getRangeList = filesImpl.getMethod("getRangeList");
getRangeList.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getRangeList.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization startCopy = filesImpl.getMethod("startCopy");
startCopy.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
startCopy.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization abortCopy = filesImpl.getMethod("abortCopy");
abortCopy.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
abortCopy.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization listHandles1 = filesImpl.getMethod("listHandles");
listHandles1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
listHandles1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization forceCloseHandles1 = filesImpl.getMethod("forceCloseHandles");
forceCloseHandles1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
forceCloseHandles1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization acquireLease = filesImpl.getMethod("acquireLease");
acquireLease.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
acquireLease.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization releaseLease = filesImpl.getMethod("releaseLease");
releaseLease.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
releaseLease.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization changeLease = filesImpl.getMethod("changeLease");
changeLease.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
changeLease.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization breakLease = filesImpl.getMethod("breakLease");
breakLease.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
breakLease.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
ClassCustomization servicesImpl = implementation.getClass("ServicesImpl");
MethodCustomization setProperties1 = servicesImpl.getMethod("setProperties");
setProperties1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setProperties1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getProperties2 = servicesImpl.getMethod("getProperties");
getProperties2.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getProperties2.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization listSharesSegment = servicesImpl.getMethod("listSharesSegment");
listSharesSegment.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
listSharesSegment.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization listSharesSegmentNext = servicesImpl.getMethod("listSharesSegmentNext");
listSharesSegmentNext.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
listSharesSegmentNext.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
ClassCustomization sharesImpl = implementation.getClass("SharesImpl");
MethodCustomization create2 = sharesImpl.getMethod("create");
create2.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
create2.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getProperties3 = sharesImpl.getMethod("getProperties");
getProperties3.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getProperties3.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization delete2 = sharesImpl.getMethod("delete");
delete2.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
delete2.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization acquireLease1 = sharesImpl.getMethod("acquireLease");
acquireLease1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
acquireLease1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization releaseLease1 = sharesImpl.getMethod("releaseLease");
releaseLease1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
releaseLease1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization changeLease1 = sharesImpl.getMethod("changeLease");
changeLease1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
changeLease1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization renewLease1 = sharesImpl.getMethod("renewLease");
renewLease1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
renewLease1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization breakLease1 = sharesImpl.getMethod("breakLease");
breakLease1.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
breakLease1.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization createSnapshot = sharesImpl.getMethod("createSnapshot");
createSnapshot.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
createSnapshot.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization createPermission = sharesImpl.getMethod("createPermission");
createPermission.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
createPermission.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getPermission = sharesImpl.getMethod("getPermission");
getPermission.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getPermission.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setProperties2 = sharesImpl.getMethod("setProperties");
setProperties2.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setProperties2.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setMetadata2 = sharesImpl.getMethod("setMetadata");
setMetadata2.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setMetadata2.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getAccessPolicy = sharesImpl.getMethod("getAccessPolicy");
getAccessPolicy.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getAccessPolicy.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization setAccessPolicy = sharesImpl.getMethod("setAccessPolicy");
setAccessPolicy.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
setAccessPolicy.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization getStatistics = sharesImpl.getMethod("getStatistics");
getStatistics.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
getStatistics.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
MethodCustomization restore = sharesImpl.getMethod("restore");
restore.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
restore.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
PackageCustomization implementationModels = customization.getPackage("com.azure.storage.file.share.implementation.models");
implementationModels.getClass("FilesAndDirectoriesListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.file.share.implementation.util.FilesAndDirectoriesListSegmentDeserializer.class)");
PackageCustomization models = customization.getPackage("com.azure.storage.file.share.models");
models.getClass("ShareFileRangeList").addAnnotation("@JsonDeserialize(using = ShareFileRangeListDeserializer.class)");
ClassCustomization shareFileHttpHeaders = models.getClass("ShareFileHttpHeaders");
shareFileHttpHeaders.removeAnnotation("@JacksonXmlRootElement(localName = \"ShareFileHttpHeaders\")");
shareFileHttpHeaders.addAnnotation("@JacksonXmlRootElement(localName = \"share-file-http-headers\")");
ClassCustomization sourceModifiedAccessConditions = models.getClass("SourceModifiedAccessConditions");
sourceModifiedAccessConditions.removeAnnotation("@JacksonXmlRootElement(localName = \"SourceModifiedAccessConditions\")");
sourceModifiedAccessConditions.addAnnotation("@JacksonXmlRootElement(localName = \"source-modified-access-conditions\")");
ClassCustomization shareServiceProperties = models.getClass("ShareServiceProperties");
PropertyCustomization hourMetrics = shareServiceProperties.getProperty("hourMetrics");
hourMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
hourMetrics.addAnnotation("@JsonProperty(value = \"HourMetrics\")");
PropertyCustomization minuteMetrics = shareServiceProperties.getProperty("minuteMetrics");
minuteMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
minuteMetrics.addAnnotation("@JsonProperty(value = \"MinuteMetrics\")");
} | create.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)"); | public void customize(LibraryCustomization customization) {
PackageCustomization implementation = customization.getPackage("com.azure.storage.file.share.implementation");
ClassCustomization directoriesImpl = implementation.getClass("DirectoriesImpl");
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("create"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("delete"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("setProperties"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("setMetadata"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("listFilesAndDirectoriesSegment"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("listHandles"));
modifyUnexpectedResponseExceptionType(directoriesImpl.getMethod("forceCloseHandles"));
ClassCustomization filesImpl = implementation.getClass("FilesImpl");
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("create"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("download"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("delete"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("setHttpHeaders"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("setMetadata"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("uploadRange"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("uploadRangeFromURL"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("getRangeList"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("startCopy"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("abortCopy"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("listHandles"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("forceCloseHandles"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("acquireLease"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("releaseLease"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("changeLease"));
modifyUnexpectedResponseExceptionType(filesImpl.getMethod("breakLease"));
ClassCustomization servicesImpl = implementation.getClass("ServicesImpl");
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("setProperties"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("listSharesSegment"));
modifyUnexpectedResponseExceptionType(servicesImpl.getMethod("listSharesSegmentNext"));
ClassCustomization sharesImpl = implementation.getClass("SharesImpl");
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("create"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("getProperties"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("delete"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("acquireLease"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("releaseLease"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("changeLease"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("renewLease"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("breakLease"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("createSnapshot"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("createPermission"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("getPermission"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("setProperties"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("setMetadata"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("getAccessPolicy"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("setAccessPolicy"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("getStatistics"));
modifyUnexpectedResponseExceptionType(sharesImpl.getMethod("restore"));
PackageCustomization implementationModels = customization.getPackage("com.azure.storage.file.share.implementation.models");
implementationModels.getClass("FilesAndDirectoriesListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.file.share.implementation.util.FilesAndDirectoriesListSegmentDeserializer.class)");
implementationModels.getClass("CopyFileSmbInfo")
.removeAnnotation("@JacksonXmlRootElement(localName = \"CopyFileSmbInfo\")")
.addAnnotation("@JacksonXmlRootElement(localName = \"copy-file-smb-info\")");
PackageCustomization models = customization.getPackage("com.azure.storage.file.share.models");
models.getClass("ShareFileRangeList").addAnnotation("@JsonDeserialize(using = ShareFileRangeListDeserializer.class)");
ClassCustomization shareFileHttpHeaders = models.getClass("ShareFileHttpHeaders");
shareFileHttpHeaders.removeAnnotation("@JacksonXmlRootElement(localName = \"ShareFileHttpHeaders\")");
shareFileHttpHeaders.addAnnotation("@JacksonXmlRootElement(localName = \"share-file-http-headers\")");
ClassCustomization sourceModifiedAccessConditions = models.getClass("SourceModifiedAccessConditions");
sourceModifiedAccessConditions.removeAnnotation("@JacksonXmlRootElement(localName = \"SourceModifiedAccessConditions\")");
sourceModifiedAccessConditions.addAnnotation("@JacksonXmlRootElement(localName = \"source-modified-access-conditions\")");
ClassCustomization shareServiceProperties = models.getClass("ShareServiceProperties");
PropertyCustomization hourMetrics = shareServiceProperties.getProperty("hourMetrics");
hourMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
hourMetrics.addAnnotation("@JsonProperty(value = \"HourMetrics\")");
PropertyCustomization minuteMetrics = shareServiceProperties.getProperty("minuteMetrics");
minuteMetrics.removeAnnotation("@JsonProperty(value = \"Metrics\")");
minuteMetrics.addAnnotation("@JsonProperty(value = \"MinuteMetrics\")");
} | class ShareStorageCustomization extends Customization {
@Override
} | class ShareStorageCustomization extends Customization {
@Override
private void modifyUnexpectedResponseExceptionType(MethodCustomization method) {
method.removeAnnotation("@UnexpectedResponseExceptionType(StorageErrorException.class)");
method.addAnnotation("@UnexpectedResponseExceptionType(com.azure.storage.file.share.models.ShareStorageException.class)");
}
} |
If `textLine.getAppearance() == null`, what will the default value of the `textAppearance` object be? | private static TextAppearance getTextAppearance(TextLine textLine) {
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
if (textLine.getAppearance().getStyle().getName() != null) {
TextStyleHelper.setName(textStyle,
TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
}
TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | return textAppearance; | private static TextAppearance getTextAppearance(TextLine textLine) {
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
if (textLine.getAppearance().getStyle().getName() != null) {
TextStyleHelper.setName(textStyle,
TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
}
TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final int DEFAULT_TABLE_SPAN = 1;
// Private constructor: this is a static utility class and must not be instantiated.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @param modelId the unlabeled model Id used for recognition.
* @return The List of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
String modelId) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<RecognizedForm> extractedFormList;
// Build all pages up front; each recognized form below references a sub-range of this list.
List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
if (!CoreUtils.isNullOrEmpty(documentResults)) {
// Document-level results present: build one RecognizedForm per DocumentResult.
extractedFormList = new ArrayList<>();
for (DocumentResult documentResultItem : documentResults) {
FormPageRange formPageRange;
List<Integer> documentPageRange = documentResultItem.getPageRange();
// The service reports the range as a two-element [firstPage, lastPage] list;
// anything else falls back to a single-page (1, 1) range.
if (documentPageRange.size() == 2) {
formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
} else {
formPageRange = new FormPageRange(1, 1);
}
Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
// Page numbers are 1-based while formPages is 0-based, hence the -1 offset;
// subList's exclusive upper bound makes getLastPageNumber() the correct end index.
final RecognizedForm recognizedForm = new RecognizedForm(
extractedFieldMap,
documentResultItem.getDocType(),
formPageRange,
formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
if (documentResultItem.getModelId() != null) {
RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
}
extractedFormList.add(recognizedForm);
}
} else {
// No document-level results (per the Javadoc, the unlabeled-model path): build one
// single-page form per PageResult, deriving the form type from the cluster id.
extractedFormList = new ArrayList<>();
forEachWithIndex(pageResults, ((index, pageResultItem) -> {
StringBuilder formType = new StringBuilder("form-");
int pageNumber = pageResultItem.getPage();
Integer clusterId = pageResultItem.getClusterId();
if (clusterId != null) {
formType.append(clusterId);
}
Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
pageResultItem, pageNumber);
// Each unlabeled form spans exactly one page; index pairs pageResults with formPages.
final RecognizedForm recognizedForm = new RecognizedForm(
extractedFieldMap,
formType.toString(),
new FormPageRange(pageNumber, pageNumber),
Collections.singletonList(formPages.get(index)));
RecognizedFormHelper.setModelId(recognizedForm, modelId);
extractedFormList.add(recognizedForm);
}));
}
return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
// Computed once outside the per-page lambda; pageResults may legitimately be null or empty.
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
// Tables come from the PageResult that shares this ReadResult's index.
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
if (pageResultItem != null) {
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
}
// Text lines are only materialized when the caller requested field elements.
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
// NOTE(review): unlike the table branch above, this access is not guarded by
// pageResultsIsNullOrEmpty nor null-checked — if selection marks are present while
// pageResults is null/empty this throws. Confirm the service never returns that combination.
PageResult pageResultItem = pageResults.get(index);
perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
pageResultItem.getPage());
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
perPageFormSelectionMarkList));
}));
return formPages;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormSelectionMark}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param pageNumber The page number.
*
* @return A list of {@code FormSelectionMark}.
*/
/**
 * Converts the service-returned selection marks of a single page into {@code FormSelectionMark}
 * instances, mapping the service's state enum onto the public model's state enum.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param pageNumber The 1-based page number these selection marks belong to.
 *
 * @return A list of {@code FormSelectionMark}.
 */
static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
    return readResultItem.getSelectionMarks().stream()
        .map(selectionMark -> {
            // Selection marks carry no text content, hence the explicit null text argument.
            final FormSelectionMark formSelectionMark = new FormSelectionMark(
                null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
            final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // IllegalStateException (a RuntimeException subtype, so backward compatible for
                // callers) rather than a raw RuntimeException for an unexpected service value.
                throw LOGGER.logThrowableAsError(new IllegalStateException(
                    String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
            }
            FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
            FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
            return formSelectionMark;
        })
        .collect(Collectors.toList());
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
/**
 * Builds the per-page {@link FormTable} list from the service page result.
 *
 * @param pageResultItem The extracted page level information returned by the service.
 * @param readResults The text extraction result returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The list of per page {@code FormTable}; empty when the page has no tables.
 */
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
    if (pageResultItem.getTables() == null) {
        return new ArrayList<>();
    } else {
        return pageResultItem.getTables().stream()
            .map(dataTable -> {
                FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    dataTable.getCells()
                        .stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            // Row/column spans default to 1 when the service omits them.
                            dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
                            dataTableCell.getColumnSpan() == null
                                ? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            // Boolean.TRUE.equals treats a null service Boolean as false without
                            // the verbose null-ternary.
                            Boolean.TRUE.equals(dataTableCell.isHeader()),
                            Boolean.TRUE.equals(dataTableCell.isFooter()),
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
                        .collect(Collectors.toList()), pageNumber);
                FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
                return formTable;
            })
            .collect(Collectors.toList());
    }
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
/**
 * Converts the text lines of a per-page {@link ReadResult} into SDK-level {@link FormLine}s.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 *
 * @return The list of {@code FormLine}.
 */
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    List<FormLine> formLines = new ArrayList<>(readResultItem.getLines().size());
    for (TextLine serviceLine : readResultItem.getLines()) {
        FormLine formLine = new FormLine(
            serviceLine.getText(),
            toBoundingBox(serviceLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(serviceLine.getWords(), readResultItem.getPage()));
        // Appearance is attached through the helper; there is no constructor slot for it.
        FormLineHelper.setAppearance(formLine, getTextAppearance(serviceLine));
        formLines.add(formLine);
    }
    return formLines;
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
/**
 * The field map returned on analyze with a labeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @return The {@link RecognizedForm} field map.
 */
private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults) {
    // Insertion order of the service fields is preserved for the caller.
    Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
    Map<String, FieldValue> serviceFields = documentResultItem.getFields();
    if (!CoreUtils.isNullOrEmpty(serviceFields)) {
        for (Map.Entry<String, FieldValue> entry : serviceFields.entrySet()) {
            String key = entry.getKey();
            FieldValue fieldValue = entry.getValue();
            if (fieldValue == null) {
                // A field can be present with no value; keep the key with a defaulted confidence.
                recognizedFieldMap.put(key, new FormField(key, null, null, null,
                    DEFAULT_CONFIDENCE_VALUE));
                continue;
            }
            List<FormElement> formElementList = setReferenceElements(fieldValue.getElements(), readResults);
            // "ReceiptType" and array-typed fields carry no page-anchored value text.
            FieldData valueData = ("ReceiptType".equals(key) || ARRAY == fieldValue.getType())
                ? null
                : new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                    fieldValue.getPage(), formElementList);
            recognizedFieldMap.put(key, setFormField(key, valueData, fieldValue, readResults));
        }
    }
    return recognizedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
*
* @param name The name of the field.
* @param valueData The value text of the field.
* @param fieldValue The named field values returned by the service.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
    List<ReadResult> readResults) {
    com.azure.ai.formrecognizer.models.FieldValue value;
    // Dispatch on the service-side value type and wrap it in the matching strongly typed SDK value.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                FieldValueType.PHONE_NUMBER);
            break;
        case STRING:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                FieldValueType.STRING);
            break;
        case TIME:
            // The service returns the time as an "HH:mm:ss" string; null stays null.
            LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"));
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
            break;
        case DATE:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                FieldValueType.DATE);
            break;
        case INTEGER:
            // The service integer is widened to the SDK's LONG representation; a null value is kept.
            com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
            if (fieldValue.getValueInteger() == null) {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
            } else {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                        FieldValueType.LONG);
            }
            value = longFieldValue;
            break;
        case NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                FieldValueType.FLOAT);
            break;
        case ARRAY:
            // Recurses via toFieldValueArray for each element of the array.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
            break;
        case OBJECT:
            // Recurses via toFieldValueObject for each member of the object.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
            break;
        case SELECTION_MARK:
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
            if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Unknown state: fall back to the expandable-enum constructed from the raw field text.
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                    fieldValue.getText());
            }
            value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                FieldValueType.SELECTION_MARK_STATE);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    // Label data is always null here; only the value side carries text/bounding-box data.
    return new FormField(name, null, valueData, value,
        setDefaultConfidenceValue(fieldValue.getConfidence()));
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
/**
 * Returns the given confidence, substituting the SDK default when the service omitted it.
 *
 * @param confidence the confidence returned by service.
 *
 * @return the field confidence value.
 */
private static float setDefaultConfidenceValue(Float confidence) {
    if (confidence == null) {
        return DEFAULT_CONFIDENCE_VALUE;
    }
    return confidence;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue}
 * to a SDK level map of {@link FormField}.
 *
 * @param valueObject The map of field values returned by the service in {@link FieldValue}.
 * @param readResults The text extraction result returned by the service.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
    List<ReadResult> readResults) {
    // TreeMap keeps the nested object's keys in sorted order.
    Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        FieldValue nestedValue = entry.getValue();
        FieldData nestedValueData = new FieldData(nestedValue.getText(),
            toBoundingBox(nestedValue.getBoundingBox()),
            nestedValue.getPage(),
            setReferenceElements(nestedValue.getElements(), readResults));
        fieldValueObjectMap.put(entry.getKey(),
            setFormField(entry.getKey(), nestedValueData, nestedValue, readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in {@link FieldValue
* @param readResults The text extraction result returned by the service.
* @return The List of {@link FormField}.
*/
private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField> formFields = new ArrayList<>(valueArray.size());
    for (FieldValue fieldValue : valueArray) {
        FieldData valueData = null;
        // Nested arrays carry no page-anchored text; otherwise anchor data requires page,
        // bounding box and text to all be present.
        boolean hasAnchorData = fieldValue.getPage() != null && fieldValue.getBoundingBox() != null
            && fieldValue.getText() != null;
        if (ARRAY != fieldValue.getType() && hasAnchorData) {
            valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                fieldValue.getPage(),
                setReferenceElements(fieldValue.getElements(), readResults));
        }
        // Array elements are unnamed fields.
        formFields.add(setFormField(null, valueData, fieldValue, readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
/**
 * Assembles the SDK page model from the service read result plus the per-page artifacts.
 *
 * @param readResultItem The per page text extraction item result returned by the service.
 * @param perPageTableList The per page tables list.
 * @param perPageLineList The per page form lines.
 * @param perPageSelectionMarkList The per page selection marks.
 *
 * @return The per page {@code FormPage}.
 */
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
    final FormPage page = new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        LengthUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        perPageLineList,
        perPageTableList,
        readResultItem.getPage());
    // Selection marks have no constructor slot; attach them through the accessor helper.
    FormPageHelper.setSelectionMarks(page, perPageSelectionMarkList);
    return page;
}
/**
 * Helper method to set the {@link RecognizedForm} fields from the unlabeled result returned by the
 * service.
 *
 * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on {@link RecognizedForm} fields.
 */
private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
    List<ReadResult> readResults,
    PageResult pageResultItem, int pageNumber) {
    // LinkedHashMap preserves the key/value pair order reported by the service.
    Map<String, FormField> formFieldMap = new LinkedHashMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        List<FormElement> formKeyContentList = new ArrayList<>();
        List<FormElement> formValueContentList = new ArrayList<>();
        if (includeFieldElements) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults
            );
        }
        // NOTE(review): assumes keyValuePair.getKey() and getValue() are non-null for every
        // pair returned by the service — confirm against the service contract.
        FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled results have no field names; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + index;
        FormField formField = new FormField(fieldName, labelData, valueData,
            new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                FieldValueType.STRING),
            setDefaultConfidenceValue(keyValuePair.getConfidence())
        );
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeFieldElements} set to
* true.
*
* @return The list if referenced elements.
*/
private static List<FormElement> setReferenceElements(List<String> elements,
    List<ReadResult> readResults) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new ArrayList<>();
    }
    List<FormElement> formElementList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Strip everything but digits from the reference string and split the remainder;
        // presumably references look like "#/readResults/i/lines/j[/words/k]" — TODO confirm.
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices: a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            // readResultIndex + 1 is used as the 1-based page number; assumes read results are
            // ordered one per page — TODO confirm.
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                readResultIndex + 1, setDefaultConfidenceValue(textWord.getConfidence()));
            formElementList.add(wordElement);
        } else {
            // Two indices (or any count other than three): treated as a whole line reference.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                readResultIndex + 1, toWords(textLine.getWords(), readResultIndex + 1));
            FormLineHelper.setAppearance(lineElement, getTextAppearance(textLine));
            formElementList.add(lineElement);
        }
    });
    return formElementList;
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
/**
 * Converts service-level {@link TextWord}s into SDK-level {@link FormWord}s.
 *
 * @param words A list of word reference elements returned by the service.
 * @param pageNumber The 1 based page number on which this word element exists.
 *
 * @return The list of {@code FormWord words}.
 */
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord serviceWord : words) {
        formWords.add(new FormWord(
            serviceWord.getText(),
            toBoundingBox(serviceWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(serviceWord.getConfidence())));
    }
    return formWords;
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
/**
 * Converts the service's flat list of coordinates (x1, y1, x2, y2, ...) into a
 * {@link FieldBoundingBox}.
 *
 * @param serviceBoundingBox A list of eight numbers representing the four points of a box.
 *
 * @return A {@link FieldBoundingBox}, or {@code null} when the list is absent or has an odd length.
 */
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    // Consume coordinates pairwise as (x, y). Stepping by 2 replaces the previous
    // get(i)/get(++i) form, which mutated the loop index inside the body and was easy to misread.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new FieldBoundingBox(pointList);
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final int DEFAULT_TABLE_SPAN = 1;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @param modelId the unlabeled model Id used for recognition.
* @return The List of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
    String modelId) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList;
    // Page-level conversion is shared by both the labeled and unlabeled paths below.
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled path: the service grouped pages into documents with typed fields.
        extractedFormList = new ArrayList<>();
        for (DocumentResult documentResultItem : documentResults) {
            FormPageRange formPageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Malformed page range from the service: fall back to a single-page range of (1, 1).
                formPageRange = new FormPageRange(1, 1);
            }
            Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                formPageRange,
                // Page range is 1-based inclusive; subList wants a 0-based half-open range.
                formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
            RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
            if (documentResultItem.getModelId() != null) {
                RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
            }
            extractedFormList.add(recognizedForm);
        }
    } else {
        // Unlabeled path: one form per page result, typed "form-<clusterId>" when a cluster exists.
        extractedFormList = new ArrayList<>();
        forEachWithIndex(pageResults, ((index, pageResultItem) -> {
            StringBuilder formType = new StringBuilder("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
                pageResultItem, pageNumber);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new FormPageRange(pageNumber, pageNumber),
                Collections.singletonList(formPages.get(index)));
            // Unlabeled results carry no model id of their own; use the caller-supplied one.
            RecognizedFormHelper.setModelId(recognizedForm, modelId);
            extractedFormList.add(recognizedForm);
        }));
    }
    return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
/**
 * Transforms the service returned {@link AnalyzeResult} into SDK model {@link FormPage}s.
 *
 * @param analyzeResult The service returned result for analyze layouts.
 * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
 *
 * @return The List of {@code FormPage}.
 */
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            if (pageResultItem != null) {
                perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
            }
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
            // FIX: the previous code called pageResults.get(index) unconditionally here, which
            // throws a NullPointerException when the operation returns no page results (the
            // tables branch above already guards against this). Prefer the page result's page
            // number when available, otherwise fall back to the read result's own page.
            int selectionMarkPage = readResultItem.getPage();
            if (!pageResultsIsNullOrEmpty && pageResults.get(index) != null) {
                selectionMarkPage = pageResults.get(index).getPage();
            }
            perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
                selectionMarkPage);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
            perPageFormSelectionMarkList));
    }));
    return formPages;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormSelectionMark}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param pageNumber The page number.
*
* @return A list of {@code FormSelectionMark}.
*/
static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
    return readResultItem.getSelectionMarks().stream()
        .map(selectionMark -> {
            final FormSelectionMark formSelectionMark = new FormSelectionMark(
                null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
            final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            // Only SELECTED/UNSELECTED are supported; any other state aborts the conversion.
            if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                throw LOGGER.logThrowableAsError(new RuntimeException(
                    String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
            }
            // Confidence and state have no constructor slots; attach them via the helper.
            FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
            FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
            return formSelectionMark;
        })
        .collect(Collectors.toList());
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
    if (pageResultItem.getTables() == null) {
        return new ArrayList<>();
    } else {
        return pageResultItem.getTables().stream()
            .map(dataTable -> {
                FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    dataTable.getCells()
                        .stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            // Row/column spans and header/footer flags default when the
                            // service returns null for them.
                            dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
                            dataTableCell.getColumnSpan() == null
                                ? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                            dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
                        .collect(Collectors.toList()), pageNumber);
                FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
                return formTable;
            })
            .collect(Collectors.toList());
    }
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    return readResultItem.getLines().stream()
        .map(textLine -> {
            FormLine formLine = new FormLine(
                textLine.getText(),
                toBoundingBox(textLine.getBoundingBox()),
                readResultItem.getPage(),
                toWords(textLine.getWords(), readResultItem.getPage()));
            // Appearance has no constructor slot; attach it via the accessor helper.
            FormLineHelper.setAppearance(formLine, getTextAppearance(textLine));
            return formLine;
        })
        .collect(Collectors.toList());
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @return The {@link RecognizedForm
*/
private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults) {
    // LinkedHashMap preserves the service's field insertion order for the caller.
    Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
    if (!CoreUtils.isNullOrEmpty(documentResultItem.getFields())) {
        documentResultItem.getFields().forEach((key, fieldValue) -> {
            if (fieldValue != null) {
                List<FormElement> formElementList = setReferenceElements(fieldValue.getElements(), readResults);
                FieldData valueData;
                // "ReceiptType" and array-typed fields carry no page-anchored value text.
                if ("ReceiptType".equals(key) || ARRAY == fieldValue.getType()) {
                    valueData = null;
                } else {
                    valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                        fieldValue.getPage(), formElementList);
                }
                recognizedFieldMap.put(key, setFormField(key, valueData, fieldValue, readResults));
            } else {
                // A field can be present without a value; keep the key with defaulted confidence.
                recognizedFieldMap.put(key, new FormField(key, null, null, null,
                    DEFAULT_CONFIDENCE_VALUE));
            }
        });
    }
    return recognizedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
*
* @param name The name of the field.
* @param valueData The value text of the field.
* @param fieldValue The named field values returned by the service.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
    List<ReadResult> readResults) {
    com.azure.ai.formrecognizer.models.FieldValue value;
    // Dispatch on the service value type and wrap it in the matching strongly typed SDK value.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                FieldValueType.PHONE_NUMBER);
            break;
        case STRING:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                FieldValueType.STRING);
            break;
        case TIME:
            // The service returns times as "HH:mm:ss" strings; null stays null.
            LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"));
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
            break;
        case DATE:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                FieldValueType.DATE);
            break;
        case INTEGER:
            // Service integers are widened to the SDK's LONG representation; null is kept.
            com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
            if (fieldValue.getValueInteger() == null) {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
            } else {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                        FieldValueType.LONG);
            }
            value = longFieldValue;
            break;
        case NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                FieldValueType.FLOAT);
            break;
        case ARRAY:
            // Recursive conversion of each array element.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
            break;
        case OBJECT:
            // Recursive conversion of each object member.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
            break;
        case SELECTION_MARK:
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
            if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Unknown state: build an expandable-enum value from the raw field text.
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                    fieldValue.getText());
            }
            value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                FieldValueType.SELECTION_MARK_STATE);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    // Label data is always null here; only the value side carries anchored text data.
    return new FormField(name, null, valueData, value,
        setDefaultConfidenceValue(fieldValue.getConfidence()));
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // Auto-unboxes; substitutes DEFAULT_CONFIDENCE_VALUE (1.0f) when the service omitted it.
    return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
    List<ReadResult> readResults) {
    // TreeMap keeps the nested object's keys in sorted order.
    Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
    valueObject.forEach((key, fieldValue) ->
        fieldValueObjectMap.put(key,
            setFormField(key,
                new FieldData(fieldValue.getText(),
                    toBoundingBox(fieldValue.getBoundingBox()),
                    fieldValue.getPage(),
                    setReferenceElements(fieldValue.getElements(), readResults)),
                fieldValue,
                readResults)
        ));
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in {@link FieldValue
* @param readResults The text extraction result returned by the service.
* @return The List of {@link FormField}.
*/
private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    return valueArray.stream()
        .map(fieldValue -> {
            FieldData valueData = null;
            // Nested arrays carry no anchored text; otherwise anchoring needs page,
            // bounding box and text to all be present.
            if (ARRAY != fieldValue.getType()
                && (fieldValue.getPage() != null && fieldValue.getBoundingBox() != null
                && fieldValue.getText() != null)) {
                valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                    fieldValue.getPage(),
                    setReferenceElements(fieldValue.getElements(), readResults));
            }
            // Array elements are unnamed fields.
            return setFormField(null, valueData, fieldValue, readResults);
        })
        .collect(Collectors.toList());
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
    // Assemble the SDK page model from the service read result plus the per-page artifacts.
    FormPage formPage = new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        LengthUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        perPageLineList,
        perPageTableList,
        readResultItem.getPage());
    // Selection marks have no constructor slot; attach them via the accessor helper.
    FormPageHelper.setSelectionMarks(formPage, perPageSelectionMarkList);
    return formPage;
}
/**
* Helper method to set the {@link RecognizedForm
* service.
*
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
* @param readResults The text extraction result returned by the service.
* @param pageResultItem The extracted page level information returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The fields populated on {@link RecognizedForm
*/
private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
    List<ReadResult> readResults,
    PageResult pageResultItem, int pageNumber) {
    // LinkedHashMap preserves the key/value pair order reported by the service.
    Map<String, FormField> formFieldMap = new LinkedHashMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        List<FormElement> formKeyContentList = new ArrayList<>();
        List<FormElement> formValueContentList = new ArrayList<>();
        if (includeFieldElements) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults
            );
        }
        // NOTE(review): assumes each pair's key and value are non-null — confirm against
        // the service contract.
        FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled results have no field names; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + index;
        FormField formField = new FormField(fieldName, labelData, valueData,
            new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                FieldValueType.STRING),
            setDefaultConfidenceValue(keyValuePair.getConfidence())
        );
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
    /**
     * Resolves service element reference strings into concrete {@link FormElement}s (lines/words)
     * when {@code includeFieldElements} is set to true. Each reference encodes numeric indices
     * into the read results (presumably of the form {@code "#/readResults/i/lines/j[/words/k]"} —
     * TODO confirm); all non-digit characters are stripped and the remaining 2 or 3 numbers are
     * used as indices.
     *
     * @return The list of referenced elements, empty when there are no references.
     */
    private static List<FormElement> setReferenceElements(List<String> elements,
        List<ReadResult> readResults) {
        if (CoreUtils.isNullOrEmpty(elements)) {
            return new ArrayList<>();
        }
        List<FormElement> formElementList = new ArrayList<>();
        elements.forEach(elementString -> {
            // Reduce the reference to its numeric indices, e.g. "0 1 2".
            String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
            if (indices.length < 2) {
                throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                    + "for the field value."));
            }
            int readResultIndex = Integer.parseInt(indices[0]);
            int lineIndex = Integer.parseInt(indices[1]);
            if (indices.length == 3) {
                // Three indices -> a single word inside a line.
                int wordIndex = Integer.parseInt(indices[2]);
                TextWord textWord =
                    readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
                // readResultIndex + 1: read results are one per page, so the 1-based page number
                // is assumed to be index + 1 — confirm.
                FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                    readResultIndex + 1, setDefaultConfidenceValue(textWord.getConfidence()));
                formElementList.add(wordElement);
            } else {
                // Two indices -> a whole line.
                TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
                FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                    readResultIndex + 1, toWords(textLine.getWords(), readResultIndex + 1));
                FormLineHelper.setAppearance(lineElement, getTextAppearance(textLine));
                formElementList.add(lineElement);
            }
        });
        return formElementList;
    }
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
return words.stream()
.map(textWord -> new FormWord(
textWord.getText(),
toBoundingBox(textWord.getBoundingBox()),
pageNumber,
setDefaultConfidenceValue(textWord.getConfidence()))
).collect(Collectors.toList());
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new FieldBoundingBox(pointList);
}
} |
worth validating that appearance has a value before accessing the content | public static void main(final String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
SyncPoller<FormRecognizerOperationResult, List<FormPage>> recognizeContentPoller =
client.beginRecognizeContent(targetStream, sourceFile.length());
List<FormPage> contentPageResults = recognizeContentPoller.getFinalResult();
for (int i = 0; i < contentPageResults.size(); i++) {
final FormPage formPage = contentPageResults.get(i);
System.out.printf("---- Recognized content info for page %d ----%n", i);
System.out.printf("Page has width: %.2f and height: %.2f, measured with unit: %s%n",
formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
final List<FormTable> tables = formPage.getTables();
for (int i1 = 0; i1 < tables.size(); i1++) {
final FormTable formTable = tables.get(i1);
System.out.printf("Table %d has %d rows and %d columns.%n", i1, formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell has text '%s', within bounding box %s.%n", formTableCell.getText(),
formTableCell.getBoundingBox().toString());
});
System.out.println();
}
formPage.getLines().forEach(formLine ->
System.out
.printf("Line %s consists of %d words and has a text style %s with a confidence score of %.2f.%n",
formLine.getText(), formLine.getWords().size(),
formLine.getAppearance().getStyle().getName(),
formLine.getAppearance().getStyle().getConfidence()));
}
} | .printf("Line %s consists of %d words and has a text style %s with a confidence score of %.2f.%n", | public static void main(final String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File sourceFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(sourceFile.toPath());
InputStream targetStream = new ByteArrayInputStream(fileContent);
SyncPoller<FormRecognizerOperationResult, List<FormPage>> recognizeContentPoller =
client.beginRecognizeContent(targetStream, sourceFile.length());
List<FormPage> contentPageResults = recognizeContentPoller.getFinalResult();
for (int i = 0; i < contentPageResults.size(); i++) {
final FormPage formPage = contentPageResults.get(i);
System.out.printf("---- Recognized content info for page %d ----%n", i);
System.out.printf("Page has width: %.2f and height: %.2f, measured with unit: %s%n",
formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
final List<FormTable> tables = formPage.getTables();
for (int i1 = 0; i1 < tables.size(); i1++) {
final FormTable formTable = tables.get(i1);
System.out.printf("Table %d has %d rows and %d columns.%n", i1, formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell -> {
System.out.printf("Cell has text '%s', within bounding box %s.%n", formTableCell.getText(),
formTableCell.getBoundingBox().toString());
});
System.out.println();
}
formPage.getLines().forEach(formLine -> {
if (formLine.getAppearance() != null) {
System.out.printf(
"Line %s consists of %d words and has a text style %s with a confidence score of %.2f.%n",
formLine.getText(), formLine.getWords().size(),
formLine.getAppearance().getStyle().getName(),
formLine.getAppearance().getStyle().getConfidence());
}
});
}
} | class RecognizeContent {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class RecognizeContent {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
oh no. leaked key.... please regenerate the key for this resource | public static void main(final String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
PollerFlux<FormRecognizerOperationResult, List<FormPage>> recognizeContentPoller =
client.beginRecognizeContentFromUrl(
"https:
Mono<List<FormPage>> contentPageResults = recognizeContentPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
contentPageResults.subscribe(formPages -> {
for (int i = 0; i < formPages.size(); i++) {
final FormPage formPage = formPages.get(i);
System.out.printf("---- Recognized content info for page %d ----%n", i);
System.out.printf("Page has width: %f and height: %f, measured with unit: %s%n", formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
final List<FormTable> tables = formPage.getTables();
for (int i1 = 0; i1 < tables.size(); i1++) {
final FormTable formTable = tables.get(i1);
System.out.printf("Table %d has %d rows and %d columns.%n", i1, formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell ->
System.out.printf("Cell has text '%s', within bounding box %s.%n", formTableCell.getText(),
formTableCell.getBoundingBox().toString()));
System.out.println();
}
formPage.getLines().forEach(formLine ->
System.out
.printf(
"Line %s consists of %d words and has a text style %s with a confidence score of %.2f.%n",
formLine.getText(), formLine.getWords().size(),
formLine.getAppearance().getStyle().getName(),
formLine.getAppearance().getStyle().getConfidence()));
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | .endpoint("https: | public static void main(final String[] args) {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
PollerFlux<FormRecognizerOperationResult, List<FormPage>> recognizeContentPoller =
client.beginRecognizeContentFromUrl(
"https:
Mono<List<FormPage>> contentPageResults = recognizeContentPoller
.last()
.flatMap(pollResponse -> {
if (pollResponse.getStatus().isComplete()) {
return pollResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ pollResponse.getStatus()));
}
});
contentPageResults.subscribe(formPages -> {
for (int i = 0; i < formPages.size(); i++) {
final FormPage formPage = formPages.get(i);
System.out.printf("---- Recognized content info for page %d ----%n", i);
System.out.printf("Page has width: %f and height: %f, measured with unit: %s%n", formPage.getWidth(),
formPage.getHeight(),
formPage.getUnit());
final List<FormTable> tables = formPage.getTables();
for (int i1 = 0; i1 < tables.size(); i1++) {
final FormTable formTable = tables.get(i1);
System.out.printf("Table %d has %d rows and %d columns.%n", i1, formTable.getRowCount(),
formTable.getColumnCount());
formTable.getCells().forEach(formTableCell ->
System.out.printf("Cell has text '%s', within bounding box %s.%n", formTableCell.getText(),
formTableCell.getBoundingBox().toString()));
System.out.println();
}
formPage.getLines().forEach(formLine -> {
if (formLine.getAppearance() != null) {
System.out.printf(
"Line %s consists of %d words and has a text style %s with a confidence score of %.2f.%n",
formLine.getText(), formLine.getWords().size(),
formLine.getAppearance().getStyle().getName(),
formLine.getAppearance().getStyle().getConfidence());
}
});
}
});
try {
TimeUnit.MINUTES.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class RecognizeContentFromUrlAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | class RecognizeContentFromUrlAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} |
It would be null, do we set any other default value? | private static TextAppearance getTextAppearance(TextLine textLine) {
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
if (textLine.getAppearance().getStyle().getName() != null) {
TextStyleHelper.setName(textStyle,
TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
}
TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | return textAppearance; | private static TextAppearance getTextAppearance(TextLine textLine) {
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
if (textLine.getAppearance().getStyle().getName() != null) {
TextStyleHelper.setName(textStyle,
TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
}
TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | class Transforms {
    private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
    // Strips everything except digits from a service element reference (presumably of the form
    // "#/readResults/0/lines/1/words/2" — confirm) so its numeric indices can be parsed.
    private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
    // Confidence substituted when the service omits a confidence score.
    private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
    // Row/column span assumed when the service omits a table cell's span.
    private static final int DEFAULT_TABLE_SPAN = 1;
    // Static utility class; never instantiated.
    private Transforms() {
    }
    /**
     * Helper method to transform the service returned {@link AnalyzeResult} to the SDK model
     * {@link RecognizedForm}. Labeled models (with {@code documentResults}) produce named fields;
     * otherwise each page result becomes one synthetic {@code "form-<clusterId>"} form.
     *
     * @param analyzeResult The service returned result for analyze custom forms.
     * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
     * @param modelId the model Id used for recognition, applied when the service result carries none.
     *
     * @return The List of {@code RecognizedForm}.
     */
    static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
        String modelId) {
        List<ReadResult> readResults = analyzeResult.getReadResults();
        List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
        List<PageResult> pageResults = analyzeResult.getPageResults();
        List<RecognizedForm> extractedFormList;
        List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
        if (!CoreUtils.isNullOrEmpty(documentResults)) {
            // Labeled / prebuilt path: one recognized form per document result.
            extractedFormList = new ArrayList<>();
            for (DocumentResult documentResultItem : documentResults) {
                FormPageRange formPageRange;
                List<Integer> documentPageRange = documentResultItem.getPageRange();
                if (documentPageRange.size() == 2) {
                    formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
                } else {
                    // Malformed range from the service; fall back to a single-page range.
                    formPageRange = new FormPageRange(1, 1);
                }
                Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
                // subList converts the 1-based, inclusive page range to 0-based indices.
                final RecognizedForm recognizedForm = new RecognizedForm(
                    extractedFieldMap,
                    documentResultItem.getDocType(),
                    formPageRange,
                    formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
                RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
                if (documentResultItem.getModelId() != null) {
                    RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
                }
                extractedFormList.add(recognizedForm);
            }
        } else {
            // Unlabeled path: one recognized form per page result.
            extractedFormList = new ArrayList<>();
            forEachWithIndex(pageResults, ((index, pageResultItem) -> {
                // Form type is "form-" plus the page's cluster id, when present.
                StringBuilder formType = new StringBuilder("form-");
                int pageNumber = pageResultItem.getPage();
                Integer clusterId = pageResultItem.getClusterId();
                if (clusterId != null) {
                    formType.append(clusterId);
                }
                Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
                    pageResultItem, pageNumber);
                final RecognizedForm recognizedForm = new RecognizedForm(
                    extractedFieldMap,
                    formType.toString(),
                    new FormPageRange(pageNumber, pageNumber),
                    Collections.singletonList(formPages.get(index)));
                RecognizedFormHelper.setModelId(recognizedForm, modelId);
                extractedFormList.add(recognizedForm);
            }));
        }
        return extractedFormList;
    }
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
List<ReadResult> readResults = analyzeResult.getReadResults();
List<PageResult> pageResults = analyzeResult.getPageResults();
List<FormPage> formPages = new ArrayList<>();
boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
forEachWithIndex(readResults, ((index, readResultItem) -> {
List<FormTable> perPageTableList = new ArrayList<>();
if (!pageResultsIsNullOrEmpty) {
PageResult pageResultItem = pageResults.get(index);
if (pageResultItem != null) {
perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
}
}
List<FormLine> perPageFormLineList = new ArrayList<>();
if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
perPageFormLineList = getReadResultFormLines(readResultItem);
}
List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
PageResult pageResultItem = pageResults.get(index);
perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
pageResultItem.getPage());
}
formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
perPageFormSelectionMarkList));
}));
return formPages;
}
    /**
     * Helper method to convert the selection marks of the per page {@link ReadResult} item to
     * {@link FormSelectionMark}.
     *
     * @param readResultItem The per page text extraction item result returned by the service.
     * @param pageNumber The 1 based page number the marks belong to.
     *
     * @return A list of {@code FormSelectionMark}.
     * @throws RuntimeException when the service reports a state other than selected/unselected.
     */
    static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
        return readResultItem.getSelectionMarks().stream()
            .map(selectionMark -> {
                // Selection marks carry no text of their own, only a bounding box, state and
                // confidence; hence the null text argument.
                final FormSelectionMark formSelectionMark = new FormSelectionMark(
                    null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
                final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
                com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
                // Translate the implementation enum to the public SDK enum; anything else is a
                // service contract violation and is surfaced as an error.
                if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                    selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
                } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                    selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
                } else {
                    throw LOGGER.logThrowableAsError(new RuntimeException(
                        String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
                }
                FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
                FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
                return formSelectionMark;
            })
            .collect(Collectors.toList());
    }
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
if (pageResultItem.getTables() == null) {
return new ArrayList<>();
} else {
return pageResultItem.getTables().stream()
.map(dataTable -> {
FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
dataTable.getCells()
.stream()
.map(dataTableCell -> new FormTableCell(
dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
dataTableCell.getColumnSpan() == null
? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
dataTableCell.getConfidence(),
dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
.collect(Collectors.toList()), pageNumber);
FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
return formTable;
})
.collect(Collectors.toList());
}
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
return readResultItem.getLines().stream()
.map(textLine -> {
FormLine formLine = new FormLine(
textLine.getText(),
toBoundingBox(textLine.getBoundingBox()),
readResultItem.getPage(),
toWords(textLine.getWords(), readResultItem.getPage()));
FormLineHelper.setAppearance(formLine, getTextAppearance(textLine));
return formLine;
})
.collect(Collectors.toList());
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
    /**
     * The field map returned on analyze with a labeled model id (fields keyed by their trained
     * label names). Note: the previous doc said "unlabeled"; this is the labeled-model path —
     * the unlabeled path is {@code getUnlabeledFieldMap}.
     *
     * @param documentResultItem The extracted document level information.
     * @param readResults The text extraction result returned by the service.
     * @return The {@link RecognizedForm} fields keyed by field name, in service order.
     */
    private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
        List<ReadResult> readResults) {
        Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
        if (!CoreUtils.isNullOrEmpty(documentResultItem.getFields())) {
            documentResultItem.getFields().forEach((key, fieldValue) -> {
                if (fieldValue != null) {
                    List<FormElement> formElementList = setReferenceElements(fieldValue.getElements(), readResults);
                    FieldData valueData;
                    // "ReceiptType" and array-typed fields have no single text span on the page,
                    // so they carry no value data.
                    if ("ReceiptType".equals(key) || ARRAY == fieldValue.getType()) {
                        valueData = null;
                    } else {
                        valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                            fieldValue.getPage(), formElementList);
                    }
                    recognizedFieldMap.put(key, setFormField(key, valueData, fieldValue, readResults));
                } else {
                    // The service knows the label but found no value; emit a placeholder field
                    // with the default confidence.
                    recognizedFieldMap.put(key, new FormField(key, null, null, null,
                        DEFAULT_CONFIDENCE_VALUE));
                }
            });
        }
        return recognizedFieldMap;
    }
    /**
     * Helper method that converts the incoming service field value to one of the strongly typed SDK level
     * {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
     *
     * @param name The name of the field ({@code null} for anonymous array items).
     * @param valueData The value text of the field; may be {@code null}.
     * @param fieldValue The named field values returned by the service.
     * @param readResults The text extraction result returned by the service.
     *
     * @return The strongly typed {@link FormField} for the field input.
     * @throws RuntimeException when the service reports a field value type this SDK does not know.
     */
    private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
        List<ReadResult> readResults) {
        com.azure.ai.formrecognizer.models.FieldValue value;
        // Dispatch on the wire type and wrap the raw value in the matching SDK FieldValue.
        switch (fieldValue.getType()) {
            case PHONE_NUMBER:
                value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                    FieldValueType.PHONE_NUMBER);
                break;
            case STRING:
                value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                    FieldValueType.STRING);
                break;
            case TIME:
                // The service sends times as "HH:mm:ss" text; parse into LocalTime.
                LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                    .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"))
                value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
                break;
            case DATE:
                value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                    FieldValueType.DATE);
                break;
            case INTEGER:
                // Service integers widen to long in the SDK model; keep null when absent.
                com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
                if (fieldValue.getValueInteger() == null) {
                    longFieldValue =
                        new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
                } else {
                    longFieldValue =
                        new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                            FieldValueType.LONG);
                }
                value = longFieldValue;
                break;
            case NUMBER:
                value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                    FieldValueType.FLOAT);
                break;
            case ARRAY:
                // Recursive: each array item becomes its own (anonymous) FormField.
                value = new com.azure.ai.formrecognizer.models.FieldValue(
                    toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
                break;
            case OBJECT:
                // Recursive: nested object becomes a map of FormFields.
                value = new com.azure.ai.formrecognizer.models.FieldValue(
                    toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
                break;
            case SELECTION_MARK:
                com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
                final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
                if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                    selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
                } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                    selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
                } else {
                    // Unknown state: fall back to an expandable-enum value built from the text.
                    selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                        fieldValue.getText());
                }
                value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                    FieldValueType.SELECTION_MARK_STATE);
                break;
            default:
                throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
        }
        // Label data is null here; labeled fields expose only the value side.
        return new FormField(name, null, valueData, value,
            setDefaultConfidenceValue(fieldValue.getConfidence()));
    }
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
return confidence == null ? DEFAULT_CONFIDENCE_VALUE : confidence;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
List<ReadResult> readResults) {
Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
valueObject.forEach((key, fieldValue) ->
fieldValueObjectMap.put(key,
setFormField(key,
new FieldData(fieldValue.getText(),
toBoundingBox(fieldValue.getBoundingBox()),
fieldValue.getPage(),
setReferenceElements(fieldValue.getElements(), readResults)),
fieldValue,
readResults)
));
return fieldValueObjectMap;
}
    /**
     * Converts the service returned list of {@code FieldValue}s into the SDK level List of
     * {@link FormField}.
     *
     * @param valueArray The array of field values returned by the service.
     * @param readResults The text extraction result returned by the service.
     * @return The List of {@link FormField}.
     */
    private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
        return valueArray.stream()
            .map(fieldValue -> {
                FieldData valueData = null;
                // Nested arrays, and items missing page/bounding box/text, have no single text
                // span on the page, so they carry no value data.
                if (ARRAY != fieldValue.getType()
                    && (fieldValue.getPage() != null && fieldValue.getBoundingBox() != null
                    && fieldValue.getText() != null)) {
                    valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                        fieldValue.getPage(),
                        setReferenceElements(fieldValue.getElements(), readResults));
                }
                // Array items are anonymous; the field name is intentionally null.
                return setFormField(null, valueData, fieldValue, readResults);
            })
            .collect(Collectors.toList());
    }
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
FormPage formPage = new FormPage(
readResultItem.getHeight(),
readResultItem.getAngle(),
LengthUnit.fromString(readResultItem.getUnit().toString()),
readResultItem.getWidth(),
perPageLineList,
perPageTableList,
readResultItem.getPage());
FormPageHelper.setSelectionMarks(formPage, perPageSelectionMarkList);
return formPage;
}
    /**
     * Helper method to build the {@link RecognizedForm} field map from the raw key/value pairs an
     * <em>unlabeled</em> custom model returned for one page. Pairs have no names, so each is
     * exposed under a synthetic {@code "field-<index>"} key, in service order.
     *
     * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
     * @param readResults The text extraction result returned by the service.
     * @param pageResultItem The extracted page level information returned by the service.
     * @param pageNumber The 1 based page number on which these fields exist.
     *
     * @return The fields populated on the {@link RecognizedForm}.
     */
    private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
        List<ReadResult> readResults,
        PageResult pageResultItem, int pageNumber) {
        // Insertion order is preserved so fields come out in the service's order.
        Map<String, FormField> formFieldMap = new LinkedHashMap<>();
        List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
        forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
            List<FormElement> formKeyContentList = new ArrayList<>();
            List<FormElement> formValueContentList = new ArrayList<>();
            if (includeFieldElements) {
                // NOTE(review): assumes the pair's key and value are never null — confirm
                // against the service contract.
                formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
                formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults);
            }
            FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
                toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
            FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
                toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
            // Synthesize a name from the pair index; values are always surfaced as strings.
            String fieldName = "field-" + index;
            FormField formField = new FormField(fieldName, labelData, valueData,
                new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                    FieldValueType.STRING),
                setDefaultConfidenceValue(keyValuePair.getConfidence())
            );
            formFieldMap.put(fieldName, formField);
        }));
        return formFieldMap;
    }
    /**
     * Helper method to set the text reference elements on FieldValue/fields when
     * {@code includeFieldElements} set to true. Reference strings encode numeric indices into the
     * read results (presumably {@code "#/readResults/i/lines/j[/words/k]"} — TODO confirm); all
     * non-digits are stripped and the remaining 2 or 3 numbers used as indices.
     *
     * @return The list of referenced elements, empty when there are none.
     */
    private static List<FormElement> setReferenceElements(List<String> elements,
        List<ReadResult> readResults) {
        if (CoreUtils.isNullOrEmpty(elements)) {
            return new ArrayList<>();
        }
        List<FormElement> formElementList = new ArrayList<>();
        elements.forEach(elementString -> {
            // Reduce the reference string to its numeric indices, e.g. "0 1 2".
            String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
            if (indices.length < 2) {
                throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                    + "for the field value."));
            }
            int readResultIndex = Integer.parseInt(indices[0]);
            int lineIndex = Integer.parseInt(indices[1]);
            if (indices.length == 3) {
                // Three indices -> one word within a line.
                int wordIndex = Integer.parseInt(indices[2]);
                TextWord textWord =
                    readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
                // readResultIndex + 1: read results are one per page, so the 1-based page number
                // is assumed to be index + 1 — confirm.
                FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                    readResultIndex + 1, setDefaultConfidenceValue(textWord.getConfidence()));
                formElementList.add(wordElement);
            } else {
                // Two indices -> a whole line.
                TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
                FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                    readResultIndex + 1, toWords(textLine.getWords(), readResultIndex + 1));
                FormLineHelper.setAppearance(lineElement, getTextAppearance(textLine));
                formElementList.add(lineElement);
            }
        });
        return formElementList;
    }
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
    // Build the SDK words with a plain loop; each service word maps 1:1 onto a FormWord.
    List<FormWord> formWords = new ArrayList<>();
    for (TextWord serviceWord : words) {
        formWords.add(new FormWord(
            serviceWord.getText(),
            toBoundingBox(serviceWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(serviceWord.getConfidence())));
    }
    return formWords;
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A box must contain an even number of coordinates (x/y pairs); reject anything else.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>();
    // Consume coordinates pairwise. Stepping by 2 is clearer (and less error-prone) than the
    // previous form that mutated the loop index with '++i' inside the argument list.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new FieldBoundingBox(pointList);
}
} | class Transforms {
// Logger used to surface mapping errors from this utility class.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Extracts the numeric path segments out of service element-reference strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence reported when the service omits one.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Row/column span assumed when a table cell omits it.
private static final int DEFAULT_TABLE_SPAN = 1;
// Static utility class; no instances.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @param modelId the unlabeled model Id used for recognition.
* @return The List of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
    String modelId) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList;
    // Layout (pages, lines, tables) is transformed first; the forms built below only slice it.
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled model: the service returned one DocumentResult per recognized document.
        extractedFormList = new ArrayList<>();
        for (DocumentResult documentResultItem : documentResults) {
            FormPageRange formPageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Defensive fallback: a malformed page range defaults to page 1 only.
                formPageRange = new FormPageRange(1, 1);
            }
            Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                formPageRange,
                // Page numbers are 1-based but formPages is 0-indexed, hence the -1 offset.
                formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
            RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
            if (documentResultItem.getModelId() != null) {
                RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
            }
            extractedFormList.add(recognizedForm);
        }
    } else {
        // Unlabeled model: one PageResult per page, each becoming its own single-page form.
        extractedFormList = new ArrayList<>();
        forEachWithIndex(pageResults, ((index, pageResultItem) -> {
            // Form type is "form-<clusterId>" when the service clustered the page, else "form-".
            StringBuilder formType = new StringBuilder("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
                pageResultItem, pageNumber);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new FormPageRange(pageNumber, pageNumber),
                Collections.singletonList(formPages.get(index)));
            // Unlabeled results carry no per-document model id; use the caller-supplied one.
            RecognizedFormHelper.setModelId(recognizedForm, modelId);
            extractedFormList.add(recognizedForm);
        }));
    }
    return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables only exist when the service produced page results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            if (pageResultItem != null) {
                perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
            }
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
            // Fix: the previous code dereferenced pageResults.get(index) here without the
            // null/empty guard applied to the table branch above, which could throw a
            // NullPointerException for results that have selection marks but no page results.
            // Fall back to the read result's own page number when page results are absent.
            int selectionMarkPageNumber = readResultItem.getPage();
            if (!pageResultsIsNullOrEmpty && pageResults.get(index) != null) {
                selectionMarkPageNumber = pageResults.get(index).getPage();
            }
            perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
                selectionMarkPageNumber);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
            perPageFormSelectionMarkList));
    }));
    return formPages;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormSelectionMark}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param pageNumber The page number.
*
* @return A list of {@code FormSelectionMark}.
*/
static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
    return readResultItem.getSelectionMarks().stream()
        .map(selectionMark -> {
            final FormSelectionMark formSelectionMark = new FormSelectionMark(
                null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
            final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
            // Translate the implementation-model state onto the public SDK enum.
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Consistency fix: every other throw in this class goes through
                // logExceptionAsError; logThrowableAsError was the lone outlier here.
                throw LOGGER.logExceptionAsError(new RuntimeException(
                    String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
            }
            // Confidence and state have no public setters; use the helpers.
            FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
            FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
            return formSelectionMark;
        })
        .collect(Collectors.toList());
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
    if (pageResultItem.getTables() == null) {
        // No tables on this page; return a mutable empty list for callers.
        return new ArrayList<>();
    } else {
        return pageResultItem.getTables().stream()
            .map(dataTable -> {
                FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    dataTable.getCells()
                        .stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            // Missing spans default to 1 (a plain single cell).
                            dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
                            dataTableCell.getColumnSpan() == null
                                ? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            // Boolean.TRUE.equals(..) collapses the null-check-plus-unbox
                            // ternaries and can never NPE on a null Boolean.
                            Boolean.TRUE.equals(dataTableCell.isHeader()),
                            Boolean.TRUE.equals(dataTableCell.isFooter()),
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
                        .collect(Collectors.toList()), pageNumber);
                // Bounding box has no public setter; attach it through the helper.
                FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
                return formTable;
            })
            .collect(Collectors.toList());
    }
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Convert every service text line on this page into an SDK FormLine.
    List<FormLine> formLines = new ArrayList<>();
    for (TextLine serviceLine : readResultItem.getLines()) {
        FormLine formLine = new FormLine(
            serviceLine.getText(),
            toBoundingBox(serviceLine.getBoundingBox()),
            readResultItem.getPage(),
            toWords(serviceLine.getWords(), readResultItem.getPage()));
        // Appearance is attached through the helper rather than a constructor argument.
        FormLineHelper.setAppearance(formLine, getTextAppearance(serviceLine));
        formLines.add(formLine);
    }
    return formLines;
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
/**
 * The field map returned on analyze with a labeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @return The map of strongly typed {@link FormField} values for the {@link RecognizedForm}.
 */
private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults) {
    // LinkedHashMap preserves the field ordering returned by the service.
    Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
    if (!CoreUtils.isNullOrEmpty(documentResultItem.getFields())) {
        documentResultItem.getFields().forEach((key, fieldValue) -> {
            if (fieldValue != null) {
                List<FormElement> formElementList = setReferenceElements(fieldValue.getElements(), readResults);
                FieldData valueData;
                // "ReceiptType" and array-typed values get no FieldData of their own; only
                // their nested/strongly-typed value is surfaced.
                if ("ReceiptType".equals(key) || ARRAY == fieldValue.getType()) {
                    valueData = null;
                } else {
                    valueData = new FieldData(fieldValue.getText(), toBoundingBox(fieldValue.getBoundingBox()),
                        fieldValue.getPage(), formElementList);
                }
                recognizedFieldMap.put(key, setFormField(key, valueData, fieldValue, readResults));
            } else {
                // A null service value still yields an entry so callers see every declared field.
                recognizedFieldMap.put(key, new FormField(key, null, null, null,
                    DEFAULT_CONFIDENCE_VALUE));
            }
        });
    }
    return recognizedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
*
* @param name The name of the field.
* @param valueData The value text of the field.
* @param fieldValue The named field values returned by the service.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
    List<ReadResult> readResults) {
    com.azure.ai.formrecognizer.models.FieldValue value;
    // Map each service value type onto the corresponding strongly typed SDK FieldValue.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                FieldValueType.PHONE_NUMBER);
            break;
        case STRING:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                FieldValueType.STRING);
            break;
        case TIME:
            // Service times are "HH:mm:ss" strings; a null stays null instead of failing to parse.
            LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"));
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
            break;
        case DATE:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                FieldValueType.DATE);
            break;
        case INTEGER:
            // Service integers are surfaced to users as LONG.
            com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
            if (fieldValue.getValueInteger() == null) {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
            } else {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                        FieldValueType.LONG);
            }
            value = longFieldValue;
            break;
        case NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                FieldValueType.FLOAT);
            break;
        case ARRAY:
            // Arrays recurse through toFieldValueArray, producing nested FormFields.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
            break;
        case OBJECT:
            // Objects recurse through toFieldValueObject, producing a nested field map.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
            break;
        case SELECTION_MARK:
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
            if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Unknown states fall back to an expandable-enum value built from the raw text.
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                    fieldValue.getText());
            }
            value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                FieldValueType.SELECTION_MARK_STATE);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    return new FormField(name, null, valueData, value,
        setDefaultConfidenceValue(fieldValue.getConfidence()));
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // A missing service confidence is reported as the default instead of null.
    if (confidence != null) {
        return confidence;
    }
    return DEFAULT_CONFIDENCE_VALUE;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
    List<ReadResult> readResults) {
    // TreeMap keeps the resulting nested fields sorted by field name.
    Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue serviceValue = entry.getValue();
        FieldData valueData = new FieldData(serviceValue.getText(),
            toBoundingBox(serviceValue.getBoundingBox()),
            serviceValue.getPage(),
            setReferenceElements(serviceValue.getElements(), readResults));
        fieldValueObjectMap.put(fieldName, setFormField(fieldName, valueData, serviceValue, readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in {@link FieldValue
* @param readResults The text extraction result returned by the service.
* @return The List of {@link FormField}.
*/
private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField> formFields = new ArrayList<>();
    for (FieldValue serviceValue : valueArray) {
        FieldData valueData = null;
        // Field data is only built for non-array values that carry all three parts
        // (page, bounding box and text).
        boolean hasFieldData = serviceValue.getPage() != null
            && serviceValue.getBoundingBox() != null
            && serviceValue.getText() != null;
        if (ARRAY != serviceValue.getType() && hasFieldData) {
            valueData = new FieldData(serviceValue.getText(), toBoundingBox(serviceValue.getBoundingBox()),
                serviceValue.getPage(),
                setReferenceElements(serviceValue.getElements(), readResults));
        }
        formFields.add(setFormField(null, valueData, serviceValue, readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
    FormPage formPage = new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        // NOTE(review): assumes the service always populates 'unit'; a null here would NPE — confirm.
        LengthUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        perPageLineList,
        perPageTableList,
        readResultItem.getPage());
    // Selection marks have no constructor slot; attach them through the helper.
    FormPageHelper.setSelectionMarks(formPage, perPageSelectionMarkList);
    return formPage;
}
/**
 * Helper method to populate the {@link RecognizedForm} fields from the page-level key/value pairs
 * returned by the service.
 *
 * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@link RecognizedForm}.
 */
private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
    List<ReadResult> readResults,
    PageResult pageResultItem, int pageNumber) {
    // LinkedHashMap keeps fields in the order the service returned the key/value pairs.
    Map<String, FormField> formFieldMap = new LinkedHashMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        List<FormElement> formKeyContentList = new ArrayList<>();
        List<FormElement> formValueContentList = new ArrayList<>();
        if (includeFieldElements) {
            // NOTE(review): assumes getKey()/getValue() are never null on a pair — confirm
            // against the service contract.
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults
            );
        }
        FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled fields have no names; synthesize stable "field-<n>" keys from the pair index.
        String fieldName = "field-" + index;
        FormField formField = new FormField(fieldName, labelData, valueData,
            new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                FieldValueType.STRING),
            setDefaultConfidenceValue(keyValuePair.getConfidence())
        );
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeFieldElements} set to
* true.
*
* @return The list if referenced elements.
*/
private static List<FormElement> setReferenceElements(List<String> elements,
    List<ReadResult> readResults) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new ArrayList<>();
    }
    List<FormElement> referencedElements = new ArrayList<>();
    for (String elementReference : elements) {
        // Strip everything that is not a digit, leaving the numeric path segments of the
        // reference: read-result index, line index and (optionally) word index.
        String[] pathIndices = NON_DIGIT_PATTERN.matcher(elementReference).replaceAll(" ").trim().split(" ");
        if (pathIndices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readIndex = Integer.parseInt(pathIndices[0]);
        int lineIdx = Integer.parseInt(pathIndices[1]);
        if (pathIndices.length == 3) {
            // Word-level reference: resolve a single word within the line.
            int wordIdx = Integer.parseInt(pathIndices[2]);
            TextWord serviceWord =
                readResults.get(readIndex).getLines().get(lineIdx).getWords().get(wordIdx);
            referencedElements.add(new FormWord(serviceWord.getText(),
                toBoundingBox(serviceWord.getBoundingBox()),
                readIndex + 1, setDefaultConfidenceValue(serviceWord.getConfidence())));
        } else {
            // Line-level reference: resolve the whole line (page numbers are 1-based, so +1).
            TextLine serviceLine = readResults.get(readIndex).getLines().get(lineIdx);
            FormLine referencedLine = new FormLine(serviceLine.getText(),
                toBoundingBox(serviceLine.getBoundingBox()),
                readIndex + 1, toWords(serviceLine.getWords(), readIndex + 1));
            FormLineHelper.setAppearance(referencedLine, getTextAppearance(serviceLine));
            referencedElements.add(referencedLine);
        }
    }
    return referencedElements;
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
    return words.stream()
        .map(textWord -> new FormWord(
            textWord.getText(),
            toBoundingBox(textWord.getBoundingBox()),
            pageNumber,
            // The service may omit word confidence; default it rather than surfacing null.
            setDefaultConfidenceValue(textWord.getConfidence()))
        ).collect(Collectors.toList());
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // A box must contain an even number of coordinates (x/y pairs); reject anything else.
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>();
    // Consume coordinates pairwise. Stepping by 2 is clearer (and less error-prone) than the
    // previous form that mutated the loop index with '++i' inside the argument list.
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new FieldBoundingBox(pointList);
}
} |
I see. Then it might be me not knowing Java. if I do this in .NET `TextAppearance textAppearance = new TextAppearance();` it will create the type with default values. I will have to do ``` if (textLine.getAppearance() == null) return null; ``` explicitly for it to work. if this is not the case for Java, ignore :) | private static TextAppearance getTextAppearance(TextLine textLine) {
// Build the style first; it stays at its defaults when the service supplied no appearance.
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
    if (textLine.getAppearance().getStyle().getName() != null) {
        TextStyleHelper.setName(textStyle,
            TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
    }
    TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
// NOTE(review): a TextAppearance is returned even when the service sent no appearance at all;
// consider whether returning null in that case would better reflect the payload — confirm.
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | return textAppearance; | private static TextAppearance getTextAppearance(TextLine textLine) {
// Build the style first; it stays at its defaults when the service supplied no appearance.
TextStyle textStyle = new TextStyle();
if (textLine.getAppearance() != null && textLine.getAppearance().getStyle() != null) {
    if (textLine.getAppearance().getStyle().getName() != null) {
        TextStyleHelper.setName(textStyle,
            TextStyleName.fromString(textLine.getAppearance().getStyle().getName().toString()));
    }
    TextStyleHelper.setConfidence(textStyle, textLine.getAppearance().getStyle().getConfidence());
}
// NOTE(review): a TextAppearance is returned even when the service sent no appearance at all;
// consider whether returning null in that case would better reflect the payload — confirm.
TextAppearance textAppearance = new TextAppearance();
TextAppearanceHelper.setStyle(textAppearance, textStyle);
return textAppearance;
} | class Transforms {
// Logger used to surface mapping errors from this utility class.
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
// Extracts the numeric path segments out of service element-reference strings.
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
// Confidence reported when the service omits one.
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
// Row/column span assumed when a table cell omits it.
private static final int DEFAULT_TABLE_SPAN = 1;
// Static utility class; no instances.
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @param modelId the unlabeled model Id used for recognition.
* @return The List of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
    String modelId) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList;
    // Layout (pages, lines, tables) is transformed first; the forms built below only slice it.
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled model: the service returned one DocumentResult per recognized document.
        extractedFormList = new ArrayList<>();
        for (DocumentResult documentResultItem : documentResults) {
            FormPageRange formPageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Defensive fallback: a malformed page range defaults to page 1 only.
                formPageRange = new FormPageRange(1, 1);
            }
            Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                formPageRange,
                // Page numbers are 1-based but formPages is 0-indexed, hence the -1 offset.
                formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
            RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
            if (documentResultItem.getModelId() != null) {
                RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
            }
            extractedFormList.add(recognizedForm);
        }
    } else {
        // Unlabeled model: one PageResult per page, each becoming its own single-page form.
        extractedFormList = new ArrayList<>();
        forEachWithIndex(pageResults, ((index, pageResultItem) -> {
            // Form type is "form-<clusterId>" when the service clustered the page, else "form-".
            StringBuilder formType = new StringBuilder("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
                pageResultItem, pageNumber);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new FormPageRange(pageNumber, pageNumber),
                Collections.singletonList(formPages.get(index)));
            // Unlabeled results carry no per-document model id; use the caller-supplied one.
            RecognizedFormHelper.setModelId(recognizedForm, modelId);
            extractedFormList.add(recognizedForm);
        }));
    }
    return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables only exist when the service produced page results.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            if (pageResultItem != null) {
                perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
            }
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
            // Fix: the previous code dereferenced pageResults.get(index) here without the
            // null/empty guard applied to the table branch above, which could throw a
            // NullPointerException for results that have selection marks but no page results.
            // Fall back to the read result's own page number when page results are absent.
            int selectionMarkPageNumber = readResultItem.getPage();
            if (!pageResultsIsNullOrEmpty && pageResults.get(index) != null) {
                selectionMarkPageNumber = pageResults.get(index).getPage();
            }
            perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
                selectionMarkPageNumber);
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
            perPageFormSelectionMarkList));
    }));
    return formPages;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormSelectionMark}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param pageNumber The page number.
*
* @return A list of {@code FormSelectionMark}.
*/
static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
    return readResultItem.getSelectionMarks().stream()
        .map(selectionMark -> {
            final FormSelectionMark formSelectionMark = new FormSelectionMark(
                null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
            final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
            // Translate the implementation-model state onto the public SDK enum.
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Consistency fix: every other throw in this class goes through
                // logExceptionAsError; logThrowableAsError was the lone outlier here.
                throw LOGGER.logExceptionAsError(new RuntimeException(
                    String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
            }
            // Confidence and state have no public setters; use the helpers.
            FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
            FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
            return formSelectionMark;
        })
        .collect(Collectors.toList());
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
    if (pageResultItem.getTables() == null) {
        // No tables on this page; return a mutable empty list for callers.
        return new ArrayList<>();
    } else {
        return pageResultItem.getTables().stream()
            .map(dataTable -> {
                FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    dataTable.getCells()
                        .stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            // Missing spans default to 1 (a plain single cell).
                            dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
                            dataTableCell.getColumnSpan() == null
                                ? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            // Boolean.TRUE.equals(..) collapses the null-check-plus-unbox
                            // ternaries and can never NPE on a null Boolean.
                            Boolean.TRUE.equals(dataTableCell.isHeader()),
                            Boolean.TRUE.equals(dataTableCell.isFooter()),
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
                        .collect(Collectors.toList()), pageNumber);
                // Bounding box has no public setter; attach it through the helper.
                FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
                return formTable;
            })
            .collect(Collectors.toList());
    }
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    return readResultItem.getLines().stream()
        .map(textLine -> {
            FormLine formLine = new FormLine(
                textLine.getText(),
                toBoundingBox(textLine.getBoundingBox()),
                readResultItem.getPage(),
                toWords(textLine.getWords(), readResultItem.getPage()));
            // Appearance is attached through the helper rather than a constructor argument.
            FormLineHelper.setAppearance(formLine, getTextAppearance(textLine));
            return formLine;
        })
        .collect(Collectors.toList());
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
/**
* The field map returned on analyze with an unlabeled model id.
*
* @param documentResultItem The extracted document level information.
* @param readResults The text extraction result returned by the service.
* @return The {@link RecognizedForm
*/
private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults) {
    // LinkedHashMap preserves the field order returned by the service.
    Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
    Map<String, FieldValue> serviceFields = documentResultItem.getFields();
    if (CoreUtils.isNullOrEmpty(serviceFields)) {
        return recognizedFieldMap;
    }
    for (Map.Entry<String, FieldValue> entry : serviceFields.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue serviceFieldValue = entry.getValue();
        if (serviceFieldValue == null) {
            // No value came back for this field; surface an empty field with the default confidence.
            recognizedFieldMap.put(fieldName,
                new FormField(fieldName, null, null, null, DEFAULT_CONFIDENCE_VALUE));
            continue;
        }
        List<FormElement> referencedElements =
            setReferenceElements(serviceFieldValue.getElements(), readResults);
        // "ReceiptType" and array-typed fields carry no value text/bounding box of their own.
        FieldData valueData;
        if ("ReceiptType".equals(fieldName) || ARRAY == serviceFieldValue.getType()) {
            valueData = null;
        } else {
            valueData = new FieldData(serviceFieldValue.getText(),
                toBoundingBox(serviceFieldValue.getBoundingBox()),
                serviceFieldValue.getPage(), referencedElements);
        }
        recognizedFieldMap.put(fieldName,
            setFormField(fieldName, valueData, serviceFieldValue, readResults));
    }
    return recognizedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
*
* @param name The name of the field.
* @param valueData The value text of the field.
* @param fieldValue The named field values returned by the service.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
    List<ReadResult> readResults) {
    com.azure.ai.formrecognizer.models.FieldValue value;
    // Wrap the service value in the matching strongly typed SDK FieldValue.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                FieldValueType.PHONE_NUMBER);
            break;
        case STRING:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                FieldValueType.STRING);
            break;
        case TIME:
            // The service returns time as an "HH:mm:ss" string; parse it into a LocalTime.
            LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"));
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
            break;
        case DATE:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                FieldValueType.DATE);
            break;
        case INTEGER:
            // Service integers are widened to Long and exposed as FieldValueType.LONG.
            com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
            if (fieldValue.getValueInteger() == null) {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
            } else {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                        FieldValueType.LONG);
            }
            value = longFieldValue;
            break;
        case NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                FieldValueType.FLOAT);
            break;
        case ARRAY:
            // Recursively convert each element of the array value.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
            break;
        case OBJECT:
            // Recursively convert each named sub-field of the object value.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
            break;
        case SELECTION_MARK:
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
            if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Unknown state: fall back to an extensible value built from the raw field text.
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                    fieldValue.getText());
            }
            value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                FieldValueType.SELECTION_MARK_STATE);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    // Label data is always null here (second constructor argument); a null service
    // confidence is replaced by the default of 1.0.
    return new FormField(name, null, valueData, value,
        setDefaultConfidenceValue(fieldValue.getConfidence()));
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // A missing confidence from the service is treated as full confidence.
    if (confidence != null) {
        return confidence;
    }
    return DEFAULT_CONFIDENCE_VALUE;
}
/**
 * Helper method to convert the service returned
 * {@link com.azure.ai.formrecognizer.implementation.models.FieldValue}
 * to an SDK level map of {@link FormField}.
 *
 * @param valueObject The map of field values returned by the service in {@link FieldValue}.
 *
 * @return The Map of {@link FormField}.
 */
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
    List<ReadResult> readResults) {
    // TreeMap keeps the object's sub-fields sorted by field name.
    Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue serviceFieldValue = entry.getValue();
        FieldData fieldData = new FieldData(serviceFieldValue.getText(),
            toBoundingBox(serviceFieldValue.getBoundingBox()),
            serviceFieldValue.getPage(),
            setReferenceElements(serviceFieldValue.getElements(), readResults));
        fieldValueObjectMap.put(fieldName,
            setFormField(fieldName, fieldData, serviceFieldValue, readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in {@link FieldValue
* @param readResults The text extraction result returned by the service.
* @return The List of {@link FormField}.
*/
private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField> formFields = new ArrayList<>();
    for (FieldValue serviceFieldValue : valueArray) {
        // Nested arrays, or values missing page/box/text, carry no field data of their own.
        boolean hasValueData = ARRAY != serviceFieldValue.getType()
            && serviceFieldValue.getPage() != null
            && serviceFieldValue.getBoundingBox() != null
            && serviceFieldValue.getText() != null;
        FieldData valueData = null;
        if (hasValueData) {
            valueData = new FieldData(serviceFieldValue.getText(),
                toBoundingBox(serviceFieldValue.getBoundingBox()),
                serviceFieldValue.getPage(),
                setReferenceElements(serviceFieldValue.getElements(), readResults));
        }
        // Array elements are anonymous, hence the null field name.
        formFields.add(setFormField(null, valueData, serviceFieldValue, readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
    FormPage formPage = new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        // Round-trip the unit through its string form to map the service enum onto the SDK enum.
        LengthUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        perPageLineList,
        perPageTableList,
        readResultItem.getPage());
    // Selection marks are not part of the public constructor; set them through the helper.
    FormPageHelper.setSelectionMarks(formPage, perPageSelectionMarkList);
    return formPage;
}
/**
 * Helper method to set the {@link RecognizedForm} fields from the unlabeled results returned by the
 * service.
 *
 * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@link RecognizedForm}.
 */
private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
    List<ReadResult> readResults,
    PageResult pageResultItem, int pageNumber) {
    Map<String, FormField> formFieldMap = new LinkedHashMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements are only resolved when the caller asked for field elements.
        List<FormElement> formKeyContentList = new ArrayList<>();
        List<FormElement> formValueContentList = new ArrayList<>();
        if (includeFieldElements) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults
            );
        }
        FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled results have no field names; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + index;
        // Unlabeled values are always exposed as STRING field values.
        FormField formField = new FormField(fieldName, labelData, valueData,
            new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                FieldValueType.STRING),
            setDefaultConfidenceValue(keyValuePair.getConfidence())
        );
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeFieldElements} set to
* true.
*
* @return The list if referenced elements.
*/
private static List<FormElement> setReferenceElements(List<String> elements,
    List<ReadResult> readResults) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new ArrayList<>();
    }
    List<FormElement> formElementList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Element references encode indices in a path-like string; strip everything but the
        // digits, leaving "<readResultIndex> <lineIndex> [<wordIndex>]".
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices reference a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            // NOTE(review): page number is derived as readResultIndex + 1 — assumes read results
            // are ordered by page starting at page 1; confirm against the service contract.
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                readResultIndex + 1, setDefaultConfidenceValue(textWord.getConfidence()));
            formElementList.add(wordElement);
        } else {
            // Two indices reference a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                readResultIndex + 1, toWords(textLine.getWords(), readResultIndex + 1));
            FormLineHelper.setAppearance(lineElement, getTextAppearance(textLine));
            formElementList.add(lineElement);
        }
    });
    return formElementList;
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
    // Convert each service word reference into an SDK FormWord on the given page.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord serviceWord : words) {
        formWords.add(new FormWord(
            serviceWord.getText(),
            toBoundingBox(serviceWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(serviceWord.getConfidence())));
    }
    return formWords;
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
    // The service representation must hold an even number of coordinates (x/y pairs).
    if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || serviceBoundingBox.size() % 2 != 0) {
        return null;
    }
    List<Point> pointList = new ArrayList<>(serviceBoundingBox.size() / 2);
    for (int i = 0; i < serviceBoundingBox.size(); i += 2) {
        pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(i + 1)));
    }
    return new FieldBoundingBox(pointList);
}
} | class Transforms {
private static final ClientLogger LOGGER = new ClientLogger(Transforms.class);
private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");
private static final float DEFAULT_CONFIDENCE_VALUE = 1.0f;
private static final int DEFAULT_TABLE_SPAN = 1;
private Transforms() {
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link RecognizedForm}.
*
* @param analyzeResult The service returned result for analyze custom forms.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @param modelId the unlabeled model Id used for recognition.
* @return The List of {@code RecognizedForm}.
*/
static List<RecognizedForm> toRecognizedForm(AnalyzeResult analyzeResult, boolean includeFieldElements,
    String modelId) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<DocumentResult> documentResults = analyzeResult.getDocumentResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<RecognizedForm> extractedFormList;
    List<FormPage> formPages = toRecognizedLayout(analyzeResult, includeFieldElements);
    if (!CoreUtils.isNullOrEmpty(documentResults)) {
        // Labeled model: the service groups fields per recognized document.
        extractedFormList = new ArrayList<>();
        for (DocumentResult documentResultItem : documentResults) {
            FormPageRange formPageRange;
            List<Integer> documentPageRange = documentResultItem.getPageRange();
            if (documentPageRange.size() == 2) {
                formPageRange = new FormPageRange(documentPageRange.get(0), documentPageRange.get(1));
            } else {
                // Defensive fallback when the service does not return a [first, last] pair.
                formPageRange = new FormPageRange(1, 1);
            }
            Map<String, FormField> extractedFieldMap = getLabeledFieldMap(documentResultItem, readResults);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                documentResultItem.getDocType(),
                formPageRange,
                // Page numbers are 1-based, so the last page number is the exclusive sublist end.
                formPages.subList(formPageRange.getFirstPageNumber() - 1, formPageRange.getLastPageNumber()));
            RecognizedFormHelper.setFormTypeConfidence(recognizedForm, documentResultItem.getDocTypeConfidence());
            if (documentResultItem.getModelId() != null) {
                RecognizedFormHelper.setModelId(recognizedForm, documentResultItem.getModelId().toString());
            }
            extractedFormList.add(recognizedForm);
        }
    } else {
        // Unlabeled model: each page result becomes its own single-page form.
        extractedFormList = new ArrayList<>();
        forEachWithIndex(pageResults, ((index, pageResultItem) -> {
            // Form type is "form-" plus the cluster id when the service assigned one.
            StringBuilder formType = new StringBuilder("form-");
            int pageNumber = pageResultItem.getPage();
            Integer clusterId = pageResultItem.getClusterId();
            if (clusterId != null) {
                formType.append(clusterId);
            }
            Map<String, FormField> extractedFieldMap = getUnlabeledFieldMap(includeFieldElements, readResults,
                pageResultItem, pageNumber);
            final RecognizedForm recognizedForm = new RecognizedForm(
                extractedFieldMap,
                formType.toString(),
                new FormPageRange(pageNumber, pageNumber),
                Collections.singletonList(formPages.get(index)));
            RecognizedFormHelper.setModelId(recognizedForm, modelId);
            extractedFormList.add(recognizedForm);
        }));
    }
    return extractedFormList;
}
/**
* Helper method to transform the service returned {@link AnalyzeResult} to SDK model {@link FormPage}.
*
* @param analyzeResult The service returned result for analyze layouts.
* @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
*
* @return The List of {@code FormPage}.
*/
static List<FormPage> toRecognizedLayout(AnalyzeResult analyzeResult, boolean includeFieldElements) {
    List<ReadResult> readResults = analyzeResult.getReadResults();
    List<PageResult> pageResults = analyzeResult.getPageResults();
    List<FormPage> formPages = new ArrayList<>();
    boolean pageResultsIsNullOrEmpty = CoreUtils.isNullOrEmpty(pageResults);
    forEachWithIndex(readResults, ((index, readResultItem) -> {
        // Tables only exist when the service returned page results for this analysis.
        List<FormTable> perPageTableList = new ArrayList<>();
        if (!pageResultsIsNullOrEmpty) {
            PageResult pageResultItem = pageResults.get(index);
            if (pageResultItem != null) {
                perPageTableList = getPageTables(pageResultItem, readResults, pageResultItem.getPage());
            }
        }
        List<FormLine> perPageFormLineList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getLines())) {
            perPageFormLineList = getReadResultFormLines(readResultItem);
        }
        List<FormSelectionMark> perPageFormSelectionMarkList = new ArrayList<>();
        if (includeFieldElements && !CoreUtils.isNullOrEmpty(readResultItem.getSelectionMarks())) {
            // Fix: use the read result's own page number. The previous code called
            // pageResults.get(index) here without the pageResultsIsNullOrEmpty guard used
            // above, which throws a NullPointerException when page results are absent.
            perPageFormSelectionMarkList = getReadResultFormSelectionMarks(readResultItem,
                readResultItem.getPage());
        }
        formPages.add(getFormPage(readResultItem, perPageTableList, perPageFormLineList,
            perPageFormSelectionMarkList));
    }));
    return formPages;
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormSelectionMark}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param pageNumber The page number.
*
* @return A list of {@code FormSelectionMark}.
*/
static List<FormSelectionMark> getReadResultFormSelectionMarks(ReadResult readResultItem, int pageNumber) {
    return readResultItem.getSelectionMarks().stream()
        .map(selectionMark -> {
            // Text is null: a selection mark has a state and a bounding box but no text content.
            final FormSelectionMark formSelectionMark = new FormSelectionMark(
                null, toBoundingBox(selectionMark.getBoundingBox()), pageNumber);
            final SelectionMarkState selectionMarkStateImpl = selectionMark.getState();
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            if (SelectionMarkState.SELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (SelectionMarkState.UNSELECTED.equals(selectionMarkStateImpl)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Fail fast on states this SDK version does not know how to map.
                throw LOGGER.logThrowableAsError(new RuntimeException(
                    String.format("%s, unsupported selection mark state.", selectionMarkStateImpl)));
            }
            // Confidence and state are not constructor arguments; set them through the helper.
            FormSelectionMarkHelper.setConfidence(formSelectionMark, selectionMark.getConfidence());
            FormSelectionMarkHelper.setState(formSelectionMark, selectionMarkState);
            return formSelectionMark;
        })
        .collect(Collectors.toList());
}
/**
* Helper method to get per-page table information.
*
* @param pageResultItem The extracted page level information returned by the service.
* @param readResults The text extraction result returned by the service.
* @param pageNumber The 1 based page number on which these fields exist.
*
* @return The list of per page {@code FormTable}.
*/
static List<FormTable> getPageTables(PageResult pageResultItem, List<ReadResult> readResults, int pageNumber) {
    if (pageResultItem.getTables() == null) {
        return new ArrayList<>();
    } else {
        return pageResultItem.getTables().stream()
            .map(dataTable -> {
                FormTable formTable = new FormTable(dataTable.getRows(), dataTable.getColumns(),
                    dataTable.getCells()
                        .stream()
                        .map(dataTableCell -> new FormTableCell(
                            dataTableCell.getRowIndex(), dataTableCell.getColumnIndex(),
                            // Missing spans default to 1 (a cell occupies a single row/column).
                            dataTableCell.getRowSpan() == null ? DEFAULT_TABLE_SPAN : dataTableCell.getRowSpan(),
                            dataTableCell.getColumnSpan() == null
                                ? DEFAULT_TABLE_SPAN : dataTableCell.getColumnSpan(),
                            dataTableCell.getText(), toBoundingBox(dataTableCell.getBoundingBox()),
                            dataTableCell.getConfidence(),
                            // Header/footer flags default to false when the service omits them.
                            dataTableCell.isHeader() == null ? false : dataTableCell.isHeader(),
                            dataTableCell.isFooter() == null ? false : dataTableCell.isFooter(),
                            pageNumber, setReferenceElements(dataTableCell.getElements(), readResults)))
                        .collect(Collectors.toList()), pageNumber);
                // The table bounding box is not a constructor argument; set it via the helper.
                FormTableHelper.setBoundingBox(formTable, toBoundingBox(dataTable.getBoundingBox()));
                return formTable;
            })
            .collect(Collectors.toList());
    }
}
/**
* Helper method to convert the per page {@link ReadResult} item to {@link FormLine}.
*
* @param readResultItem The per page text extraction item result returned by the service.
*
* @return The list of {@code FormLine}.
*/
static List<FormLine> getReadResultFormLines(ReadResult readResultItem) {
    // Convert every service text line on this page into an SDK FormLine.
    List<FormLine> formLines = new ArrayList<>();
    int pageNumber = readResultItem.getPage();
    for (TextLine serviceLine : readResultItem.getLines()) {
        FormLine formLine = new FormLine(
            serviceLine.getText(),
            toBoundingBox(serviceLine.getBoundingBox()),
            pageNumber,
            toWords(serviceLine.getWords(), pageNumber));
        // Appearance is not a constructor argument; it is applied through the helper.
        FormLineHelper.setAppearance(formLine, getTextAppearance(serviceLine));
        formLines.add(formLine);
    }
    return formLines;
}
/**
* Private method to get the appearance from the service side text line object.
* @param textLine The service side text line object.
* @return the custom type Appearance model.
*/
/**
 * The field map returned on analyze with a labeled model id.
 *
 * @param documentResultItem The extracted document level information.
 * @param readResults The text extraction result returned by the service.
 * @return The field map of the {@link RecognizedForm}.
 */
private static Map<String, FormField> getLabeledFieldMap(DocumentResult documentResultItem,
    List<ReadResult> readResults) {
    // LinkedHashMap preserves the field order returned by the service.
    Map<String, FormField> recognizedFieldMap = new LinkedHashMap<>();
    Map<String, FieldValue> serviceFields = documentResultItem.getFields();
    if (CoreUtils.isNullOrEmpty(serviceFields)) {
        return recognizedFieldMap;
    }
    for (Map.Entry<String, FieldValue> entry : serviceFields.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue serviceFieldValue = entry.getValue();
        if (serviceFieldValue == null) {
            // No value came back for this field; surface an empty field with the default confidence.
            recognizedFieldMap.put(fieldName,
                new FormField(fieldName, null, null, null, DEFAULT_CONFIDENCE_VALUE));
            continue;
        }
        List<FormElement> referencedElements =
            setReferenceElements(serviceFieldValue.getElements(), readResults);
        // "ReceiptType" and array-typed fields carry no value text/bounding box of their own.
        FieldData valueData;
        if ("ReceiptType".equals(fieldName) || ARRAY == serviceFieldValue.getType()) {
            valueData = null;
        } else {
            valueData = new FieldData(serviceFieldValue.getText(),
                toBoundingBox(serviceFieldValue.getBoundingBox()),
                serviceFieldValue.getPage(), referencedElements);
        }
        recognizedFieldMap.put(fieldName,
            setFormField(fieldName, valueData, serviceFieldValue, readResults));
    }
    return recognizedFieldMap;
}
/**
* Helper method that converts the incoming service field value to one of the strongly typed SDK level
* {@link FormField} with reference elements set when {@code includeFieldElements} is set to true.
*
* @param name The name of the field.
* @param valueData The value text of the field.
* @param fieldValue The named field values returned by the service.
* @param readResults The text extraction result returned by the service.
*
* @return The strongly typed {@link FormField} for the field input.
*/
private static FormField setFormField(String name, FieldData valueData, FieldValue fieldValue,
    List<ReadResult> readResults) {
    com.azure.ai.formrecognizer.models.FieldValue value;
    // Wrap the service value in the matching strongly typed SDK FieldValue.
    switch (fieldValue.getType()) {
        case PHONE_NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValuePhoneNumber(),
                FieldValueType.PHONE_NUMBER);
            break;
        case STRING:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueString(),
                FieldValueType.STRING);
            break;
        case TIME:
            // The service returns time as an "HH:mm:ss" string; parse it into a LocalTime.
            LocalTime fieldTime = fieldValue.getValueTime() == null ? null : LocalTime
                .parse(fieldValue.getValueTime(), DateTimeFormatter.ofPattern("HH:mm:ss"));
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldTime, FieldValueType.TIME);
            break;
        case DATE:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueDate(),
                FieldValueType.DATE);
            break;
        case INTEGER:
            // Service integers are widened to Long and exposed as FieldValueType.LONG.
            com.azure.ai.formrecognizer.models.FieldValue longFieldValue;
            if (fieldValue.getValueInteger() == null) {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(null, FieldValueType.LONG);
            } else {
                longFieldValue =
                    new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueInteger().longValue(),
                        FieldValueType.LONG);
            }
            value = longFieldValue;
            break;
        case NUMBER:
            value = new com.azure.ai.formrecognizer.models.FieldValue(fieldValue.getValueNumber(),
                FieldValueType.FLOAT);
            break;
        case ARRAY:
            // Recursively convert each element of the array value.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueArray(fieldValue.getValueArray(), readResults), FieldValueType.LIST);
            break;
        case OBJECT:
            // Recursively convert each named sub-field of the object value.
            value = new com.azure.ai.formrecognizer.models.FieldValue(
                toFieldValueObject(fieldValue.getValueObject(), readResults), FieldValueType.MAP);
            break;
        case SELECTION_MARK:
            com.azure.ai.formrecognizer.models.SelectionMarkState selectionMarkState;
            final FieldValueSelectionMark fieldValueSelectionMarkState = fieldValue.getValueSelectionMark();
            if (FieldValueSelectionMark.SELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.SELECTED;
            } else if (FieldValueSelectionMark.UNSELECTED.equals(fieldValueSelectionMarkState)) {
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.UNSELECTED;
            } else {
                // Unknown state: fall back to an extensible value built from the raw field text.
                selectionMarkState = com.azure.ai.formrecognizer.models.SelectionMarkState.fromString(
                    fieldValue.getText());
            }
            value = new com.azure.ai.formrecognizer.models.FieldValue(selectionMarkState,
                FieldValueType.SELECTION_MARK_STATE);
            break;
        default:
            throw LOGGER.logExceptionAsError(new RuntimeException("FieldValue Type not supported"));
    }
    // Label data is always null here (second constructor argument); a null service
    // confidence is replaced by the default of 1.0.
    return new FormField(name, null, valueData, value,
        setDefaultConfidenceValue(fieldValue.getConfidence()));
}
/**
* Helper method to set default confidence value if confidence returned by service is null.
*
* @param confidence the confidence returned by service.
*
* @return the field confidence value.
*/
private static float setDefaultConfidenceValue(Float confidence) {
    // A missing confidence from the service is treated as full confidence.
    if (confidence != null) {
        return confidence;
    }
    return DEFAULT_CONFIDENCE_VALUE;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level map of {@link FormField}.
*
* @param valueObject The array of field values returned by the service in {@link FieldValue
*
* @return The Map of {@link FormField}.
*/
private static Map<String, FormField> toFieldValueObject(Map<String, FieldValue> valueObject,
    List<ReadResult> readResults) {
    // TreeMap keeps the object's sub-fields sorted by field name.
    Map<String, FormField> fieldValueObjectMap = new TreeMap<>();
    for (Map.Entry<String, FieldValue> entry : valueObject.entrySet()) {
        String fieldName = entry.getKey();
        FieldValue serviceFieldValue = entry.getValue();
        FieldData fieldData = new FieldData(serviceFieldValue.getText(),
            toBoundingBox(serviceFieldValue.getBoundingBox()),
            serviceFieldValue.getPage(),
            setReferenceElements(serviceFieldValue.getElements(), readResults));
        fieldValueObjectMap.put(fieldName,
            setFormField(fieldName, fieldData, serviceFieldValue, readResults));
    }
    return fieldValueObjectMap;
}
/**
* Helper method to convert the service returned
* {@link com.azure.ai.formrecognizer.implementation.models.FieldValue
* to a SDK level List of {@link FormField}.
*
* @param valueArray The array of field values returned by the service in {@link FieldValue
* @param readResults The text extraction result returned by the service.
* @return The List of {@link FormField}.
*/
private static List<FormField> toFieldValueArray(List<FieldValue> valueArray, List<ReadResult> readResults) {
    List<FormField> formFields = new ArrayList<>();
    for (FieldValue serviceFieldValue : valueArray) {
        // Nested arrays, or values missing page/box/text, carry no field data of their own.
        boolean hasValueData = ARRAY != serviceFieldValue.getType()
            && serviceFieldValue.getPage() != null
            && serviceFieldValue.getBoundingBox() != null
            && serviceFieldValue.getText() != null;
        FieldData valueData = null;
        if (hasValueData) {
            valueData = new FieldData(serviceFieldValue.getText(),
                toBoundingBox(serviceFieldValue.getBoundingBox()),
                serviceFieldValue.getPage(),
                setReferenceElements(serviceFieldValue.getElements(), readResults));
        }
        // Array elements are anonymous, hence the null field name.
        formFields.add(setFormField(null, valueData, serviceFieldValue, readResults));
    }
    return formFields;
}
/**
* Helper method to convert the page results to {@code FormPage form pages}.
*
* @param readResultItem The per page text extraction item result returned by the service.
* @param perPageTableList The per page tables list.
* @param perPageLineList The per page form lines.
* @param perPageSelectionMarkList The per page selection marks.
*
* @return The per page {@code FormPage}.
*/
private static FormPage getFormPage(ReadResult readResultItem, List<FormTable> perPageTableList,
    List<FormLine> perPageLineList, List<FormSelectionMark> perPageSelectionMarkList) {
    FormPage formPage = new FormPage(
        readResultItem.getHeight(),
        readResultItem.getAngle(),
        // Round-trip the unit through its string form to map the service enum onto the SDK enum.
        LengthUnit.fromString(readResultItem.getUnit().toString()),
        readResultItem.getWidth(),
        perPageLineList,
        perPageTableList,
        readResultItem.getPage());
    // Selection marks are not part of the public constructor; set them through the helper.
    FormPageHelper.setSelectionMarks(formPage, perPageSelectionMarkList);
    return formPage;
}
/**
 * Helper method to set the {@link RecognizedForm} fields from the unlabeled results returned by the
 * service.
 *
 * @param includeFieldElements Boolean to indicate if to set reference elements data on fields.
 * @param readResults The text extraction result returned by the service.
 * @param pageResultItem The extracted page level information returned by the service.
 * @param pageNumber The 1 based page number on which these fields exist.
 *
 * @return The fields populated on the {@link RecognizedForm}.
 */
private static Map<String, FormField> getUnlabeledFieldMap(boolean includeFieldElements,
    List<ReadResult> readResults,
    PageResult pageResultItem, int pageNumber) {
    Map<String, FormField> formFieldMap = new LinkedHashMap<>();
    List<KeyValuePair> keyValuePairs = pageResultItem.getKeyValuePairs();
    forEachWithIndex(keyValuePairs, ((index, keyValuePair) -> {
        // Reference elements are only resolved when the caller asked for field elements.
        List<FormElement> formKeyContentList = new ArrayList<>();
        List<FormElement> formValueContentList = new ArrayList<>();
        if (includeFieldElements) {
            formKeyContentList = setReferenceElements(keyValuePair.getKey().getElements(), readResults);
            formValueContentList = setReferenceElements(keyValuePair.getValue().getElements(), readResults
            );
        }
        FieldData labelData = new FieldData(keyValuePair.getKey().getText(),
            toBoundingBox(keyValuePair.getKey().getBoundingBox()), pageNumber, formKeyContentList);
        FieldData valueData = new FieldData(keyValuePair.getValue().getText(),
            toBoundingBox(keyValuePair.getValue().getBoundingBox()), pageNumber, formValueContentList);
        // Unlabeled results have no field names; synthesize stable "field-<index>" keys.
        String fieldName = "field-" + index;
        // Unlabeled values are always exposed as STRING field values.
        FormField formField = new FormField(fieldName, labelData, valueData,
            new com.azure.ai.formrecognizer.models.FieldValue(keyValuePair.getValue().getText(),
                FieldValueType.STRING),
            setDefaultConfidenceValue(keyValuePair.getConfidence())
        );
        formFieldMap.put(fieldName, formField);
    }));
    return formFieldMap;
}
/**
* Helper method to set the text reference elements on FieldValue/fields when {@code includeFieldElements} set to
* true.
*
* @return The list if referenced elements.
*/
private static List<FormElement> setReferenceElements(List<String> elements,
    List<ReadResult> readResults) {
    if (CoreUtils.isNullOrEmpty(elements)) {
        return new ArrayList<>();
    }
    List<FormElement> formElementList = new ArrayList<>();
    elements.forEach(elementString -> {
        // Element references encode indices in a path-like string; strip everything but the
        // digits, leaving "<readResultIndex> <lineIndex> [<wordIndex>]".
        String[] indices = NON_DIGIT_PATTERN.matcher(elementString).replaceAll(" ").trim().split(" ");
        if (indices.length < 2) {
            throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find corresponding reference elements "
                + "for the field value."));
        }
        int readResultIndex = Integer.parseInt(indices[0]);
        int lineIndex = Integer.parseInt(indices[1]);
        if (indices.length == 3) {
            // Three indices reference a single word within a line.
            int wordIndex = Integer.parseInt(indices[2]);
            TextWord textWord =
                readResults.get(readResultIndex).getLines().get(lineIndex).getWords().get(wordIndex);
            // NOTE(review): page number is derived as readResultIndex + 1 — assumes read results
            // are ordered by page starting at page 1; confirm against the service contract.
            FormWord wordElement = new FormWord(textWord.getText(), toBoundingBox(textWord.getBoundingBox()),
                readResultIndex + 1, setDefaultConfidenceValue(textWord.getConfidence()));
            formElementList.add(wordElement);
        } else {
            // Two indices reference a whole line.
            TextLine textLine = readResults.get(readResultIndex).getLines().get(lineIndex);
            FormLine lineElement = new FormLine(textLine.getText(), toBoundingBox(textLine.getBoundingBox()),
                readResultIndex + 1, toWords(textLine.getWords(), readResultIndex + 1));
            FormLineHelper.setAppearance(lineElement, getTextAppearance(textLine));
            formElementList.add(lineElement);
        }
    });
    return formElementList;
}
/**
* Helper method to convert the service level {@link TextWord} to list of SDK level model {@link FormWord}.
*
* @param words A list of word reference elements returned by the service.
* @param pageNumber The 1 based page number on which this word element exists.
*
* @return The list of {@code FormWord words}.
*/
private static List<FormWord> toWords(List<TextWord> words, int pageNumber) {
    // Convert each service word reference into an SDK FormWord on the given page.
    List<FormWord> formWords = new ArrayList<>(words.size());
    for (TextWord serviceWord : words) {
        formWords.add(new FormWord(
            serviceWord.getText(),
            toBoundingBox(serviceWord.getBoundingBox()),
            pageNumber,
            setDefaultConfidenceValue(serviceWord.getConfidence())));
    }
    return formWords;
}
/**
* Helper method to convert the service level modeled eight numbers representing the four points to SDK level
* {@link FieldBoundingBox}.
*
* @param serviceBoundingBox A list of eight numbers representing the four points of a box.
*
* @return A {@link FieldBoundingBox}.
*/
private static FieldBoundingBox toBoundingBox(List<Float> serviceBoundingBox) {
if (CoreUtils.isNullOrEmpty(serviceBoundingBox) || (serviceBoundingBox.size() % 2) != 0) {
return null;
}
List<Point> pointList = new ArrayList<>();
for (int i = 0; i < serviceBoundingBox.size(); i++) {
pointList.add(new Point(serviceBoundingBox.get(i), serviceBoundingBox.get(++i)));
}
return new FieldBoundingBox(pointList);
}
} |
I think it's probably better to just wrap BlobInputStream since it takes care of buffering for us, so we don't need to issue multiple reads for no reason. | public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
while (count < len) {
int retCount = this.inputStream.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
} | public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream inputStream;
private final NioBlobOutputStream outputStream;
private long position;
private boolean closed = false;
private final Path path;
    /**
     * Creates a read-mode channel over the given blob input stream; writes are disallowed.
     */
    AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
        this.inputStream = inputStream;
        // Mark the very start of the stream so position(long) can seek anywhere via reset + skip.
        inputStream.mark(Integer.MAX_VALUE);
        this.outputStream = null;
        this.position = 0;
        this.path = path;
    }
    /**
     * Creates a write-mode channel over the given blob output stream; reads and seeks are disallowed.
     */
    AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
        this.outputStream = outputStream;
        this.inputStream = null;
        this.position = 0;
        this.path = path;
    }
@Override
@Override
    /**
     * Writes the remaining bytes of {@code src} into the backing blob output stream.
     *
     * @param src The buffer whose remaining bytes are consumed.
     * @return The number of bytes written; always the full remaining count of {@code src}.
     * @throws IOException If the channel is closed, was opened for reading, or the underlying write fails.
     */
    public int write(ByteBuffer src) throws IOException {
        AzurePath.ensureFileSystemOpen(this.path);
        validateOpen();
        validateWriteMode();
        int length = src.remaining();
        // In write mode, position doubles as the running count of bytes written (see size()).
        this.position += src.remaining();
        // Copy into a heap array because the OutputStream API consumes byte[]; get() advances src's position.
        byte[] buf = new byte[length];
        src.get(buf);
        this.outputStream.write(buf);
        return length;
    }
@Override
    /**
     * Returns the channel's current logical byte offset.
     *
     * @return The current position.
     * @throws IOException If the channel is closed.
     */
    public long position() throws IOException {
        AzurePath.ensureFileSystemOpen(this.path);
        validateOpen();
        return this.position;
    }
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by just playing with the
position variable and skipping the actual read. We'll check in read if we've seeked past the end and short
circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.inputStream.reset();
this.inputStream.mark(Integer.MAX_VALUE);
this.inputStream.skip(newPosition);
this.position = newPosition;
return this;
}
@Override
    /**
     * Returns the channel size: in read mode, the blob's size taken from its properties; in write
     * mode, the number of bytes written so far (tracked in {@code position}).
     *
     * @return The size in bytes.
     * @throws IOException If the channel is closed.
     */
    public long size() throws IOException {
        AzurePath.ensureFileSystemOpen(this.path);
        validateOpen();
        if (inputStream != null) {
            return inputStream.getBlobInputStream().getProperties().getBlobSize();
        } else {
            return position;
        }
    }
@Override
    /**
     * Unsupported operation: blobs cannot be truncated through this channel.
     *
     * @throws UnsupportedOperationException Always.
     */
    public AzureSeekableByteChannel truncate(long size) throws IOException {
        throw LoggingUtility.logError(logger, new UnsupportedOperationException());
    }
@Override
    /**
     * Returns whether this channel is still open (i.e. {@link #close()} has not been called).
     */
    public boolean isOpen() {
        AzurePath.ensureFileSystemOpen(this.path);
        return !this.closed;
    }
@Override
    /**
     * Closes whichever underlying stream this channel wraps and marks the channel closed;
     * subsequent operations fail with {@link ClosedChannelException} via validateOpen().
     *
     * @throws IOException If closing the underlying stream fails.
     */
    public void close() throws IOException {
        AzurePath.ensureFileSystemOpen(this.path);
        // Exactly one of the two streams is non-null, fixed at construction time.
        if (this.inputStream != null) {
            this.inputStream.close();
        } else {
            this.outputStream.close();
        }
        this.closed = true;
    }
    // Throws if this channel has already been closed.
    private void validateOpen() throws ClosedChannelException {
        if (this.closed) {
            throw LoggingUtility.logError(logger, new ClosedChannelException());
        }
    }
    // Throws if this channel was not opened for reading (no input stream present).
    private void validateReadMode() {
        if (this.inputStream == null) {
            throw LoggingUtility.logError(logger, new NonReadableChannelException());
        }
    }
    // Throws if this channel was not opened for writing (no output stream present).
    private void validateWriteMode() {
        if (this.outputStream == null) {
            throw LoggingUtility.logError(logger, new NonWritableChannelException());
        }
    }
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
@Override
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
/*
If the buffer is backed by an array, we can read directly from that instead of allocating new memory.
Set the position correctly if we read from underneath the buffer
*/
byte[] buf;
if (src.hasArray()) {
buf = src.array();
src.position(src.position() + length);
} else {
buf = new byte[length];
src.get(buf);
}
this.writer.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | |
Can we just add a comment here that says: if we are reading, size = the size of the file; if we are writing, size = the amount of data written so far? | public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
if (inputStream != null) {
return inputStream.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
} | if (inputStream != null) { | public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream inputStream;
private final NioBlobOutputStream outputStream;
private long position;
private boolean closed = false;
private final Path path;
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.inputStream = inputStream;
inputStream.mark(Integer.MAX_VALUE);
this.outputStream = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.outputStream = outputStream;
this.inputStream = null;
this.position = 0;
this.path = path;
}
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
while (count < len) {
int retCount = this.inputStream.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
}
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
byte[] buf = new byte[length];
src.get(buf);
this.outputStream.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by just playing with the
position variable and skipping the actual read. We'll check in read if we've seeked past the end and short
circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.inputStream.reset();
this.inputStream.mark(Integer.MAX_VALUE);
this.inputStream.skip(newPosition);
this.position = newPosition;
return this;
}
@Override
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.inputStream != null) {
this.inputStream.close();
} else {
this.outputStream.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.inputStream == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
private void validateWriteMode() {
if (this.outputStream == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
}
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
/*
If the buffer is backed by an array, we can read directly from that instead of allocating new memory.
Set the position correctly if we read from underneath the buffer
*/
byte[] buf;
if (src.hasArray()) {
buf = src.array();
src.position(src.position() + length);
} else {
buf = new byte[length];
src.get(buf);
}
this.writer.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
@Override
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
Can you clarify this doc a little - "we can get around that by modifying the position variable and skipping the actual read (when read is called next). " | public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by just playing with the
position variable and skipping the actual read. We'll check in read if we've seeked past the end and short
circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.inputStream.reset();
this.inputStream.mark(Integer.MAX_VALUE);
this.inputStream.skip(newPosition);
this.position = newPosition;
return this;
} | position variable and skipping the actual read. We'll check in read if we've seeked past the end and short | public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream inputStream;
private final NioBlobOutputStream outputStream;
private long position;
private boolean closed = false;
private final Path path;
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.inputStream = inputStream;
inputStream.mark(Integer.MAX_VALUE);
this.outputStream = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.outputStream = outputStream;
this.inputStream = null;
this.position = 0;
this.path = path;
}
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
while (count < len) {
int retCount = this.inputStream.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
}
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
byte[] buf = new byte[length];
src.get(buf);
this.outputStream.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
if (inputStream != null) {
return inputStream.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.inputStream != null) {
this.inputStream.close();
} else {
this.outputStream.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.inputStream == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
private void validateWriteMode() {
if (this.outputStream == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
}
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
/*
If the buffer is backed by an array, we can read directly from that instead of allocating new memory.
Set the position correctly if we read from underneath the buffer
*/
byte[] buf;
if (src.hasArray()) {
buf = src.array();
src.position(src.position() + length);
} else {
buf = new byte[length];
src.get(buf);
}
this.writer.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
A small performance improvement would be checking `ByteBuffer.hasArray()` and if so use the backing array instead of instantiating a new one. ```java byte[] buf; if (dst.hasArray()) { buf = dst.array(); } else { buf = new byte[len]; } // Reading logic if (!dst.hasArray()) { dst.put(buf, 0, count); } ``` | public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
} | byte[] buf = new byte[len]; | public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
private long position;
private boolean closed = false;
private final Path path;
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
@Override
@Override
public int write(ByteBuffer src) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
byte[] buf = new byte[length];
src.get(buf);
this.writer.write(buf);
return length;
}
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
/** Rejects write operations on a channel that was opened for reading. */
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Non-null only when the channel was opened for reading.
private final NioBlobInputStream reader;
// Non-null only when the channel was opened for writing.
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
/**
 * Creates a read-mode channel over {@code inputStream}, positioned at the start of the blob.
 */
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
/**
 * Creates a write-mode channel over {@code outputStream}, positioned at the start of the blob.
 */
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Writes the remaining bytes of {@code src} to the underlying blob output stream and advances
 * both this channel's position and the buffer's position by that count.
 *
 * @param src the source buffer; fully drained on success.
 * @return the number of bytes written.
 * @throws IOException if the channel is closed or not writable.
 */
@Override
public int write(ByteBuffer src) throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    validateWriteMode();
    int length = src.remaining();
    this.position += length;
    if (src.hasArray()) {
        /*
         * Write straight out of the backing array instead of copying. Fix: honor arrayOffset()
         * and position() via the (byte[], int, int) OutputStream overload -- write(byte[]) alone
         * would emit the entire backing array, including bytes outside this buffer's
         * position/limit window when the buffer is sliced or partially consumed.
         */
        this.writer.write(src.array(), src.arrayOffset() + src.position(), length);
        src.position(src.position() + length);
    } else {
        byte[] buf = new byte[length];
        src.get(buf); // get() advances the buffer's position for us
        this.writer.write(buf);
    }
    return length;
}
/**
 * Returns this channel's current position.
 *
 * @return the zero-based position within the blob.
 * @throws IOException if the channel is closed.
 */
@Override
public long position() throws IOException {
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    long current = this.position;
    return current;
}
/**
 * Seeks this read-mode channel to {@code newPosition} by resetting the underlying stream to the
 * mark placed at construction and skipping forward.
 *
 * @param newPosition the new zero-based position; seeking past end-of-file is allowed.
 * @return this channel.
 * @throws IOException if the channel is closed or the stream cannot be repositioned.
 */
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
// Rewind to the construction-time mark, re-mark, then skip forward to the target offset.
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
/**
 * Returns the size of this channel's content: the blob's reported size in read mode, or the
 * number of bytes written so far in write mode.
 *
 * @throws IOException if the channel is closed.
 */
@Override
public long size() throws IOException {
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    return (reader != null)
        ? reader.getBlobInputStream().getProperties().getBlobSize()
        : position;
}
/** Truncation is not supported; always throws {@link UnsupportedOperationException}. */
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
/** Reports whether this channel is open; {@code true} until {@link #close()} completes. */
@Override
public boolean isOpen() {
    AzurePath.ensureFileSystemOpen(this.path);
    final boolean stillOpen = !this.closed;
    return stillOpen;
}
/**
 * Closes the underlying stream (reader in read mode, writer in write mode) and marks this
 * channel closed.
 *
 * @throws IOException if closing the underlying stream fails.
 */
@Override
public void close() throws IOException {
    AzurePath.ensureFileSystemOpen(this.path);
    // Exactly one of reader/writer is non-null, depending on the channel's mode.
    if (this.reader == null) {
        this.writer.close();
    } else {
        this.reader.close();
    }
    this.closed = true;
}
/** Rejects any operation on a channel that has already been closed. */
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
/** Rejects read operations on a channel that was opened for writing. */
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
/** Rejects write operations on a channel that was opened for reading. */
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
Same comment as above where we could re-use a backing array. | public int write(ByteBuffer src) throws IOException {
// Guard: file system and channel must be open, and this must be a write-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
// Advance the channel position by the full amount about to be drained from src.
this.position += src.remaining();
// Copy the buffer's remaining bytes into a heap array for the underlying stream.
byte[] buf = new byte[length];
src.get(buf);
this.writer.write(buf);
return length;
} | byte[] buf = new byte[length]; | public int write(ByteBuffer src) throws IOException {
// Guard: file system and channel must be open, and this must be a write-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
/*
If the buffer is backed by an array, we can read directly from that instead of allocating new memory.
Set the position correctly if we read from underneath the buffer
*/
byte[] buf;
if (src.hasArray()) {
buf = src.array();
// NOTE(review): writer.write(buf) below emits the entire backing array; this assumes
// position() == 0, arrayOffset() == 0 and limit() == capacity() -- confirm callers never
// pass sliced or partially consumed buffers.
src.position(src.position() + length);
} else {
buf = new byte[length];
src.get(buf);
}
this.writer.write(buf);
return length;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/** Read-mode channel; marks the stream start so that position(long) can reset-and-skip. */
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
/** Write-mode channel starting at position zero. */
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Reads up to {@code dst.remaining()} bytes into {@code dst}.
 *
 * @param dst the destination buffer.
 * @return the number of bytes read, or -1 if the channel was seeked past end-of-file.
 * @throws IOException if the channel is closed or not readable.
 */
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
// A prior seek may have gone past end-of-file; report end-of-stream (see position(long)).
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
// Loop until the destination is satisfied or the underlying stream is exhausted.
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
}
/**
 * Returns this channel's current position.
 *
 * @return the zero-based position within the blob.
 * @throws IOException if the channel is closed.
 */
@Override
public long position() throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
// Read-mode channel, positioned at the start of the blob.
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
// Write-mode channel starting at position zero.
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Reads up to {@code dst.remaining()} bytes into {@code dst}, reading directly into the
 * destination's backing array when one is available.
 *
 * @param dst the destination buffer.
 * @return the number of bytes read, or -1 if the channel was seeked past end-of-file.
 * @throws IOException if the channel is closed or not readable.
 */
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
// A prior seek may have gone past end-of-file; report end-of-stream (see position(long)).
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
// NOTE(review): reading into dst.array() at index "count" assumes position() == 0 and
// arrayOffset() == 0 -- confirm callers never pass sliced or partially filled buffers.
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
}
/**
 * Returns this channel's current position.
 *
 * @return the zero-based position within the blob.
 * @throws IOException if the channel is closed.
 */
@Override
public long position() throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
Good idea! | public int read(ByteBuffer dst) throws IOException {
// Guard: file system and channel must be open, and this must be a read-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
// A prior seek may have gone past end-of-file; report end-of-stream.
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf = new byte[len];
// Loop until the destination is satisfied or the underlying stream is exhausted.
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
dst.put(buf, 0, count);
this.position += count;
return count;
} | byte[] buf = new byte[len]; | public int read(ByteBuffer dst) throws IOException {
// Guard: file system and channel must be open, and this must be a read-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
// A prior seek may have gone past end-of-file; report end-of-stream.
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
// NOTE(review): reading into dst.array() at index "count" assumes position() == 0 and
// arrayOffset() == 0 -- confirm callers never pass sliced or partially filled buffers.
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/** Read-mode channel; marks the stream start so that position(long) can reset-and-skip. */
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
/** Write-mode channel starting at position zero. */
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Writes all remaining bytes of {@code src} to the underlying blob output stream.
 *
 * @param src the source buffer; fully drained on success.
 * @return the number of bytes written.
 * @throws IOException if the channel is closed or not writable.
 */
@Override
public int write(ByteBuffer src) throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    validateWriteMode();
    int length = src.remaining();
    this.position += src.remaining();
    // Copy the buffer's remaining bytes into a heap array for the underlying stream.
    byte[] buf = new byte[length];
    src.get(buf);
    this.writer.write(buf);
    return length;
}
// Current zero-based position within the blob.
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
// Read-mode channel, positioned at the start of the blob.
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
// Write-mode channel starting at position zero.
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Writes the remaining bytes of {@code src} to the underlying blob output stream and advances
 * both this channel's position and the buffer's position by that count.
 *
 * @param src the source buffer; fully drained on success.
 * @return the number of bytes written.
 * @throws IOException if the channel is closed or not writable.
 */
@Override
public int write(ByteBuffer src) throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    validateWriteMode();
    int length = src.remaining();
    this.position += length;
    if (src.hasArray()) {
        /*
         * Write straight out of the backing array instead of copying. Fix: honor arrayOffset()
         * and position() via the (byte[], int, int) OutputStream overload -- write(byte[]) alone
         * would emit the entire backing array, including bytes outside this buffer's
         * position/limit window when the buffer is sliced or partially consumed.
         */
        this.writer.write(src.array(), src.arrayOffset() + src.position(), length);
        src.position(src.position() + length);
    } else {
        byte[] buf = new byte[length];
        src.get(buf); // get() advances the buffer's position for us
        this.writer.write(buf);
    }
    return length;
}
// Current zero-based position within the blob.
@Override
public long position() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
I realized in this case we'll also need to modify the position of the `ByteBuffer`: ```java buf.position(buf.position() + length) ``` | public int write(ByteBuffer src) throws IOException {
// Guard: file system and channel must be open, and this must be a write-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += length;
if (src.hasArray()) {
    /*
     * Fix: the array-backed path must still consume the buffer. Reading from the backing array
     * bypasses get(), so src's position has to be advanced explicitly; and only this buffer's
     * position/limit window is written -- write(byte[]) alone would emit the whole backing array.
     */
    this.writer.write(src.array(), src.arrayOffset() + src.position(), length);
    src.position(src.position() + length);
} else {
    byte[] buf = new byte[length];
    src.get(buf); // get() advances the buffer's position for us
    this.writer.write(buf);
}
return length;
} | buf = src.array(); | public int write(ByteBuffer src) throws IOException {
// Guard: file system and channel must be open, and this must be a write-mode channel.
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateWriteMode();
int length = src.remaining();
this.position += src.remaining();
/*
If the buffer is backed by an array, we can read directly from that instead of allocating new memory.
Set the position correctly if we read from underneath the buffer
*/
byte[] buf;
if (src.hasArray()) {
buf = src.array();
// NOTE(review): writer.write(buf) below emits the entire backing array; this assumes
// position() == 0, arrayOffset() == 0 and limit() == capacity() -- confirm callers never
// pass sliced or partially consumed buffers.
src.position(src.position() + length);
} else {
buf = new byte[length];
src.get(buf);
}
this.writer.write(buf);
return length;
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
// Read-mode channel, positioned at the start of the blob.
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
// Write-mode channel starting at position zero.
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Reads up to {@code dst.remaining()} bytes into {@code dst}, reading directly into the
 * destination's backing array when one is available.
 *
 * @param dst the destination buffer.
 * @return the number of bytes read, or -1 if the channel was seeked past end-of-file.
 * @throws IOException if the channel is closed or not readable.
 */
@Override
public int read(ByteBuffer dst) throws IOException {
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    validateReadMode();
    // A prior seek may have gone past end-of-file; report end-of-stream (see position(long)).
    if (this.position > this.size()) {
        return -1;
    }
    int count = 0;
    int len = dst.remaining();
    byte[] buf;
    int off;
    if (dst.hasArray()) {
        // Read directly into the destination's backing array, at the buffer's own window
        // (honoring arrayOffset() and position()) instead of at raw index 0.
        buf = dst.array();
        off = dst.arrayOffset() + dst.position();
    } else {
        buf = new byte[len];
        off = 0;
    }
    while (count < len) {
        int retCount = this.reader.read(buf, off + count, len - count);
        if (retCount == -1) {
            break;
        }
        count += retCount;
    }
    if (dst.hasArray()) {
        // Fix: the array-backed path previously never advanced dst's position, so callers were
        // told "count" bytes were transferred while the buffer still claimed to be empty.
        dst.position(dst.position() + count);
    } else {
        dst.put(buf, 0, count);
    }
    this.position += count;
    return count;
}
/**
 * Returns this channel's current position.
 *
 * @return the zero-based position within the blob.
 * @throws IOException if the channel is closed.
 */
@Override
public long position() throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} | class AzureSeekableByteChannel implements SeekableByteChannel {
// Logger used to surface channel errors through LoggingUtility.
private final ClientLogger logger = new ClientLogger(AzureSeekableByteChannel.class);
// Exactly one of reader/writer is non-null and determines the channel's mode.
private final NioBlobInputStream reader;
private final NioBlobOutputStream writer;
// Current logical position within the blob.
private long position;
private boolean closed = false;
private final Path path;
/*
If this type needs to be made threadsafe, closed should be volatile. We need to add a lock to guard updates to
position or make it an atomicLong. If we have a lock, we have to be careful about holding while doing io ops and at
least ensure timeouts are set. We probably have to duplicate or copy the buffers for at least writing to ensure they
don't get overwritten.
*/
// Read-mode channel, positioned at the start of the blob.
AzureSeekableByteChannel(NioBlobInputStream inputStream, Path path) {
this.reader = inputStream;
/*
We mark at the beginning (we always construct a stream to the beginning of the blob) to support seeking. We can
effectively seek anywhere by always marking at the beginning of the blob and then a seek is resetting to that
mark and skipping.
*/
inputStream.mark(Integer.MAX_VALUE);
this.writer = null;
this.position = 0;
this.path = path;
}
// Write-mode channel starting at position zero.
AzureSeekableByteChannel(NioBlobOutputStream outputStream, Path path) {
this.writer = outputStream;
this.reader = null;
this.position = 0;
this.path = path;
}
/**
 * Reads up to {@code dst.remaining()} bytes into {@code dst}, reading directly into the
 * destination's backing array when one is available.
 *
 * @param dst the destination buffer.
 * @return the number of bytes read, or -1 if the channel was seeked past end-of-file.
 * @throws IOException if the channel is closed or not readable.
 */
@Override
public int read(ByteBuffer dst) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
// A prior seek may have gone past end-of-file; report end-of-stream (see position(long)).
if (this.position > this.size()) {
return -1;
}
int count = 0;
int len = dst.remaining();
byte[] buf;
if (dst.hasArray()) {
// NOTE(review): reading into dst.array() at index "count" assumes position() == 0 and
// arrayOffset() == 0 -- confirm callers never pass sliced or partially filled buffers.
buf = dst.array();
} else {
buf = new byte[len];
}
while (count < len) {
int retCount = this.reader.read(buf, count, len - count);
if (retCount == -1) {
break;
}
count += retCount;
}
/*
Either write to the destination if we had to buffer separately or just set the position correctly if we wrote
underneath the buffer
*/
if (!dst.hasArray()) {
dst.put(buf, 0, count);
} else {
dst.position(dst.position() + count);
}
this.position += count;
return count;
}
/**
 * Returns this channel's current position.
 *
 * @return the zero-based position within the blob.
 * @throws IOException if the channel is closed.
 */
@Override
public long position() throws IOException {
    // Fix: the @Override annotation was accidentally duplicated, which does not compile (a
    // non-repeatable annotation may appear at most once per declaration).
    AzurePath.ensureFileSystemOpen(this.path);
    validateOpen();
    return this.position;
}
// Seeks by resetting the underlying stream to the construction-time mark and skipping forward.
@Override
public AzureSeekableByteChannel position(long newPosition) throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
validateReadMode();
if (newPosition < 0) {
throw LoggingUtility.logError(logger, new IllegalArgumentException("Seek position cannot be negative"));
}
/*
The javadoc says seeking past the end for reading is legal and that it should indicate the end of the file on
the next read. StorageInputStream doesn't allow this, but we can get around that by modifying the
position variable and skipping the actual read (when read is called next); we'll check in read if we've seeked
past the end and short circuit there as well.
Because we are in read mode this will always give us the size from properties.
*/
if (newPosition > this.size()) {
this.position = newPosition;
return this;
}
this.reader.reset();
this.reader.mark(Integer.MAX_VALUE);
long skipAmount = this.reader.skip(newPosition);
if (skipAmount < newPosition) {
throw new IOException("Could not set desired position");
}
this.position = newPosition;
return this;
}
// Blob size in read mode; bytes written so far in write mode.
@Override
public long size() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
validateOpen();
/*
If we are in read mode, the size is the size of the file.
If we are in write mode, the size is the amount of data written so far.
*/
if (reader != null) {
return reader.getBlobInputStream().getProperties().getBlobSize();
} else {
return position;
}
}
// Truncation is not supported by this channel.
@Override
public AzureSeekableByteChannel truncate(long size) throws IOException {
throw LoggingUtility.logError(logger, new UnsupportedOperationException());
}
// Open until close() has completed.
@Override
public boolean isOpen() {
AzurePath.ensureFileSystemOpen(this.path);
return !this.closed;
}
// Closes whichever underlying stream this channel owns, then marks the channel closed.
@Override
public void close() throws IOException {
AzurePath.ensureFileSystemOpen(this.path);
if (this.reader != null) {
this.reader.close();
} else {
this.writer.close();
}
this.closed = true;
}
// Rejects any operation on a channel that has already been closed.
private void validateOpen() throws ClosedChannelException {
if (this.closed) {
throw LoggingUtility.logError(logger, new ClosedChannelException());
}
}
// Rejects read operations on a write-mode channel.
private void validateReadMode() {
if (this.reader == null) {
throw LoggingUtility.logError(logger, new NonReadableChannelException());
}
}
// Rejects write operations on a read-mode channel.
private void validateWriteMode() {
if (this.writer == null) {
throw LoggingUtility.logError(logger, new NonWritableChannelException());
}
}
} |
The overload this is calling is checking for null. So, it's not required to check here again. | public EventData(BinaryData body) {
this(Objects.requireNonNull(body, "'body' cannot be null."), new SystemProperties(), Context.NONE);
} | this(Objects.requireNonNull(body, "'body' cannot be null."), new SystemProperties(), Context.NONE); | public EventData(BinaryData body) {
this(body, new SystemProperties(), Context.NONE);
} | class EventData {
/*
* These are properties owned by the service and set when a message is received.
*/
static final Set<String> RESERVED_SYSTEM_PROPERTIES;
private final Map<String, Object> properties;
private final BinaryData body;
private final SystemProperties systemProperties;
private Context context;
static {
final Set<String> properties = new HashSet<>();
properties.add(OFFSET_ANNOTATION_NAME.getValue());
properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue());
properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
properties.add(PUBLISHER_ANNOTATION_NAME.getValue());
RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties);
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(byte[] body) {
this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null.")));
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
/**
* Creates an event by encoding the {@code body} using UTF-8 charset.
*
* @param body The string that will be UTF-8 encoded to create an event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(String body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
}
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
/**
* Creates an event with the given {@code body}, system properties and context.
*
* @param body The data to set for this event.
* @param systemProperties System properties set by message broker for this event.
* @param context A specified key-value pair of type {@link Context}.
* @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}.
*/
EventData(BinaryData body, SystemProperties systemProperties, Context context) {
this.body = Objects.requireNonNull(body, "'body' cannot be null.");
this.context = Objects.requireNonNull(context, "'context' cannot be null.");
this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null.");
this.properties = new HashMap<>();
}
/**
* Gets the set of free-form event properties which may be used for passing metadata associated with the event with
* the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
* serialization hints for the {@link
*
* <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
* <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
*
* {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
*
* @return Application properties associated with this {@link EventData}.
*/
public Map<String, Object> getProperties() {
return properties;
}
/**
* Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are
* only present on a <b>received</b> {@link EventData}.
*
* @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}.
* {@code null} if the {@link EventData} is not received from the Event Hubs service.
*/
public Map<String, Object> getSystemProperties() {
return systemProperties;
}
/**
* Gets the actual payload/data wrapped by EventData.
*
* <p>
* If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
* {@link
* wish to deserialize the binary data.
* </p>
*
* @return A byte array representing the data.
*/
public byte[] getBody() {
return body.toBytes();
}
/**
* Returns event data as UTF-8 decoded string.
*
* @return UTF-8 decoded string representation of the event data.
*/
public String getBodyAsString() {
return new String(body.toBytes(), UTF_8);
}
/**
* Returns the {@link BinaryData} payload associated with this event.
*
* @return the {@link BinaryData} payload associated with this event.
*/
public BinaryData getBodyAsBinaryData() {
return body;
}
/**
* Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
* on a <b>received</b> {@link EventData}.
*
* @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Long getOffset() {
return systemProperties.getOffset();
}
/**
* Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
* used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
* Hubs service or there was no partition key set when the event was sent to the Event Hub.
*/
public String getPartitionKey() {
return systemProperties.getPartitionKey();
}
/**
* Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
* <b>received</b> {@link EventData}.
*
* @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Instant getEnqueuedTime() {
return systemProperties.getEnqueuedTime();
}
/**
* Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
* is unique for every message received in the Event Hub partition. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
* Hubs service.
*/
public Long getSequenceNumber() {
return systemProperties.getSequenceNumber();
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EventData eventData = (EventData) o;
return Arrays.equals(body.toBytes(), eventData.body.toBytes());
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return Arrays.hashCode(body.toBytes());
}
/**
* A specified key-value pair of type {@link Context} to set additional information on the event.
*
* @return the {@link Context} object set on the event
*/
Context getContext() {
return context;
}
/**
* Adds a new key value pair to the existing context on Event Data.
*
* @param key The key for this context object
* @param value The value for this context object.
* @throws NullPointerException if {@code key} or {@code value} is null.
* @return The updated {@link EventData}.
*/
public EventData addContext(String key, Object value) {
Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
this.context = context.addData(key, value);
return this;
}
/**
* A collection of properties populated by Azure Event Hubs service.
*/
static class SystemProperties extends HashMap<String, Object> {
private static final long serialVersionUID = -2827050124966993723L;
private final Long offset;
private final String partitionKey;
private final Instant enqueuedTime;
private final Long sequenceNumber;
SystemProperties() {
super();
offset = null;
partitionKey = null;
enqueuedTime = null;
sequenceNumber = null;
}
SystemProperties(final Map<String, Object> map) {
super(map);
this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue());
final String offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue());
if (offset == null) {
throw new IllegalStateException(String.format(Locale.US,
"offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue()));
}
this.offset = Long.valueOf(offset);
put(OFFSET_ANNOTATION_NAME.getValue(), this.offset);
final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
if (enqueuedTimeValue == null) {
throw new IllegalStateException(String.format(Locale.US,
"enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()));
}
this.enqueuedTime = enqueuedTimeValue.toInstant();
put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime);
final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
if (sequenceNumber == null) {
throw new IllegalStateException(String.format(Locale.US,
"sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()));
}
this.sequenceNumber = sequenceNumber;
put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber);
}
/**
* Gets the offset within the Event Hubs stream.
*
* @return The offset within the Event Hubs stream.
*/
private Long getOffset() {
return offset;
}
/**
* Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to
* select a partition to send the message to.
*
* @return A partition key for this Event Data.
*/
private String getPartitionKey() {
return partitionKey;
}
/**
* Gets the time this event was enqueued in the Event Hub.
*
* @return The time this was enqueued in the service.
*/
private Instant getEnqueuedTime() {
return enqueuedTime;
}
/**
* Gets the sequence number in the event stream for this event. This is unique for every message received in the
* Event Hub.
*
* @return Sequence number for this event.
* @throws IllegalStateException if {@link SystemProperties} does not contain the sequence number in a retrieved
* event.
*/
private Long getSequenceNumber() {
return sequenceNumber;
}
@SuppressWarnings("unchecked")
private <T> T removeSystemProperty(final String key) {
if (this.containsKey(key)) {
return (T) (this.remove(key));
}
return null;
}
}
} | class EventData {
/*
* These are properties owned by the service and set when a message is received.
*/
static final Set<String> RESERVED_SYSTEM_PROPERTIES;
private final Map<String, Object> properties;
private final BinaryData body;
private final SystemProperties systemProperties;
private Context context;
static {
final Set<String> properties = new HashSet<>();
properties.add(OFFSET_ANNOTATION_NAME.getValue());
properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue());
properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
properties.add(PUBLISHER_ANNOTATION_NAME.getValue());
RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties);
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(byte[] body) {
this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null.")));
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
/**
* Creates an event by encoding the {@code body} using UTF-8 charset.
*
* @param body The string that will be UTF-8 encoded to create an event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(String body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
}
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
/**
* Creates an event with the given {@code body}, system properties and context.
*
* @param body The data to set for this event.
* @param systemProperties System properties set by message broker for this event.
* @param context A specified key-value pair of type {@link Context}.
* @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}.
*/
EventData(BinaryData body, SystemProperties systemProperties, Context context) {
this.body = Objects.requireNonNull(body, "'body' cannot be null.");
this.context = Objects.requireNonNull(context, "'context' cannot be null.");
this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null.");
this.properties = new HashMap<>();
}
/**
* Gets the set of free-form event properties which may be used for passing metadata associated with the event with
* the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
* serialization hints for the {@link
*
* <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
* <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
*
* {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
*
* @return Application properties associated with this {@link EventData}.
*/
public Map<String, Object> getProperties() {
return properties;
}
/**
* Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are
* only present on a <b>received</b> {@link EventData}.
*
* @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}.
* {@code null} if the {@link EventData} is not received from the Event Hubs service.
*/
public Map<String, Object> getSystemProperties() {
return systemProperties;
}
/**
* Gets the actual payload/data wrapped by EventData.
*
* <p>
* If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
* {@link
* wish to deserialize the binary data.
* </p>
*
* @return A byte array representing the data.
*/
public byte[] getBody() {
return body.toBytes();
}
/**
* Returns event data as UTF-8 decoded string.
*
* @return UTF-8 decoded string representation of the event data.
*/
public String getBodyAsString() {
return new String(body.toBytes(), UTF_8);
}
/**
* Returns the {@link BinaryData} payload associated with this event.
*
* @return the {@link BinaryData} payload associated with this event.
*/
public BinaryData getBodyAsBinaryData() {
return body;
}
/**
* Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
* on a <b>received</b> {@link EventData}.
*
* @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Long getOffset() {
return systemProperties.getOffset();
}
/**
* Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
* used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
* Hubs service or there was no partition key set when the event was sent to the Event Hub.
*/
public String getPartitionKey() {
return systemProperties.getPartitionKey();
}
/**
* Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
* <b>received</b> {@link EventData}.
*
* @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Instant getEnqueuedTime() {
return systemProperties.getEnqueuedTime();
}
/**
* Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
* is unique for every message received in the Event Hub partition. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
* Hubs service.
*/
public Long getSequenceNumber() {
return systemProperties.getSequenceNumber();
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EventData eventData = (EventData) o;
return Arrays.equals(body.toBytes(), eventData.body.toBytes());
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return Arrays.hashCode(body.toBytes());
}
/**
* A specified key-value pair of type {@link Context} to set additional information on the event.
*
* @return the {@link Context} object set on the event
*/
Context getContext() {
return context;
}
/**
* Adds a new key value pair to the existing context on Event Data.
*
* @param key The key for this context object
* @param value The value for this context object.
* @throws NullPointerException if {@code key} or {@code value} is null.
* @return The updated {@link EventData}.
*/
public EventData addContext(String key, Object value) {
Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
this.context = context.addData(key, value);
return this;
}
/**
* A collection of properties populated by Azure Event Hubs service.
*/
static class SystemProperties extends HashMap<String, Object> {
private static final long serialVersionUID = -2827050124966993723L;
private final Long offset;
private final String partitionKey;
private final Instant enqueuedTime;
private final Long sequenceNumber;
SystemProperties() {
super();
offset = null;
partitionKey = null;
enqueuedTime = null;
sequenceNumber = null;
}
SystemProperties(final Map<String, Object> map) {
super(map);
this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue());
final String offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue());
if (offset == null) {
throw new IllegalStateException(String.format(Locale.US,
"offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue()));
}
this.offset = Long.valueOf(offset);
put(OFFSET_ANNOTATION_NAME.getValue(), this.offset);
final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
if (enqueuedTimeValue == null) {
throw new IllegalStateException(String.format(Locale.US,
"enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()));
}
this.enqueuedTime = enqueuedTimeValue.toInstant();
put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime);
final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
if (sequenceNumber == null) {
throw new IllegalStateException(String.format(Locale.US,
"sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()));
}
this.sequenceNumber = sequenceNumber;
put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber);
}
/**
* Gets the offset within the Event Hubs stream.
*
* @return The offset within the Event Hubs stream.
*/
private Long getOffset() {
return offset;
}
/**
* Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to
* select a partition to send the message to.
*
* @return A partition key for this Event Data.
*/
private String getPartitionKey() {
return partitionKey;
}
/**
* Gets the time this event was enqueued in the Event Hub.
*
* @return The time this was enqueued in the service.
*/
private Instant getEnqueuedTime() {
return enqueuedTime;
}
/**
* Gets the sequence number in the event stream for this event. This is unique for every message received in the
* Event Hub.
*
* @return Sequence number for this event.
* @throws IllegalStateException if {@link SystemProperties} does not contain the sequence number in a retrieved
* event.
*/
private Long getSequenceNumber() {
return sequenceNumber;
}
@SuppressWarnings("unchecked")
private <T> T removeSystemProperty(final String key) {
if (this.containsKey(key)) {
return (T) (this.remove(key));
}
return null;
}
}
} |
? | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner().digitalTwins().delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion).withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager.inner().digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | |
Why list and delete all instances? This would make concurrent runs fail. | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner().digitalTwins().delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion).withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager.inner().digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator(); | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} |
Shouldn't these be on separate lines? That is `.withExistingResourceGroup` deserves to be a top-level indent like the others, right? | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner().digitalTwins().delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion).withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager.inner().digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | .withRegion(defaultRegion).withExistingResourceGroup(rgName) | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} |
we don't delete them all, we only delete the instance that is failing the name availability. on line 63 we are checking the name of the instance. | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner().digitalTwins().delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion).withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager.inner().digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator(); | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} |
removed ! | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager.digitalTwins().listAsync().toBlocking().getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner().digitalTwins().delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion).withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager.inner().digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | |
Why list and delete the one that matches, instead of just calling delete on one by name? | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} |
very good question. Initially when I was generating the resource group name randomly, we couldn't do that because we didn't have the resource group name that the conflict happened on, but now that we are going with a single resource group name, we can do that. I will wait until I hear back from the china team regarding that issue with the recorded tests and update the PR hopefully tonight. | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager | public void lifecycleTest() throws InterruptedException {
rgName = defaultResourceGroupName;
ResourceGroup group = resourceManager.resourceGroups()
.define(rgName)
.withRegion(defaultRegion)
.create();
Assert.assertNotNull(group);
try {
CheckNameResultInner checkNameResult = digitalTwinsManager.inner().digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
if (!checkNameResult.nameAvailable()) {
Iterator<DigitalTwinsDescription> allDigitalTwins = digitalTwinsManager
.digitalTwins()
.listAsync()
.toBlocking()
.getIterator();
while (allDigitalTwins.hasNext()) {
DigitalTwinsDescription digitalTwin = allDigitalTwins.next();
if (digitalTwin.name().equals(defaultInstanceName)){
String existingResourceGroupName = digitalTwin.resourceGroupName();
digitalTwinsManager.inner()
.digitalTwins()
.delete(existingResourceGroupName, defaultInstanceName);
break;
}
}
checkNameResult = digitalTwinsManager.inner()
.digitalTwins()
.checkNameAvailability(defaultRegion, defaultInstanceName);
Assert.assertTrue(checkNameResult.nameAvailable());
}
DigitalTwinsDescription instance = digitalTwinsManager
.digitalTwins()
.defineDigitalTwinsInstance(defaultInstanceName)
.withRegion(defaultRegion)
.withExistingResourceGroup(rgName)
.create();
Assert.assertNotNull(instance);
Assert.assertEquals(defaultInstanceName, instance.name());
Assert.assertEquals(defaultRegion, instance.region().toString());
final String key1 = "Key1";
final String value1 = "Value1";
final String key2 = "Key2";
final String value2 = "Value2";
DigitalTwinsDescriptionInner updatedDt = digitalTwinsManager.inner()
.digitalTwins()
.update(rgName, defaultInstanceName, new DigitalTwinsPatchDescription()
.withTags(new HashMap<String, String>(){{
put(key1, value1);
put(key2, value2);
}}));
Assert.assertTrue(updatedDt.getTags().get(key1).equals(value1));
Assert.assertTrue(updatedDt.getTags().get(key2).equals(value2));
PagedList list = digitalTwinsManager
.inner()
.digitalTwins().listByResourceGroup(rgName);
Assert.assertTrue(list.size() > 0);
Object[] array = digitalTwinsManager.inner().operations().list().toArray();
ArrayList<String> myOpNames = new ArrayList<>();
for (Object op : array) {
myOpNames.add(((OperationInner) op).name());
}
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/write"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitalTwinsInstances/delete"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/eventroutes/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/digitaltwins/read"));
Assert.assertTrue(myOpNames.contains("Microsoft.DigitalTwins/models/read"));
}
finally {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} | class DigitalTwinsLifecycleTests extends TestBase {
protected ResourceManager resourceManager;
protected DigitalTwinsManager digitalTwinsManager;
private static String rgName;
protected String domain;
private static String defaultInstanceName = "DigitalTwinsSdk";
private static String defaultRegion = Region.US_WEST_CENTRAL.toString();
private static String defaultResourceGroupName = "rg2b9842374ecf6";
@Override
protected void initializeClients(RestClient restClient, String defaultSubscription, String domain) throws IOException {
resourceManager = ResourceManager
.authenticate(restClient)
.withSubscription(defaultSubscription);
digitalTwinsManager = DigitalTwinsManager
.authenticate(restClient, defaultSubscription);
this.domain = domain;
}
@Test
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
} |
nit: casing, `t` in `type` should be capitol `Type can not be null or empty.` | private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Source cannot be null or empty"));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("type cannot be null or empty"));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
} | throw logger.logExceptionAsError(new IllegalArgumentException("type cannot be null or empty")); | private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'source' cannot be null or empty."));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'type' cannot be null or empty."));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
*/
public CloudEvent(String source, String type, byte[] data, String dataContentType) {
this(source, type);
this.setDataBase64(data, dataContentType);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent}s.
* @throws IllegalArgumentException if the input parameter isn't a JSON string for a cloud event or an array of it.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
if (CoreUtils.isNullOrEmpty(id)) {
throw new IllegalArgumentException("id cannot be null or empty");
}
this.cloudEvent.setId(id);
return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
if (cloudEvent.getDataBase64() != null) {
return BinaryData.fromBytes(cloudEvent.getDataBase64());
}
return EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
if (data != null) {
byte[] encoded = Base64.getEncoder().encode(data);
this.cloudEvent.setDataBase64(encoded);
this.cloudEvent.setDatacontenttype(dataContentType);
}
return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
* @throws NullPointerException if source or type is {@code null}.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
* @throws NullPointerException if source or type is {@code null}.
*/
public CloudEvent(String source, String type, byte[] data, String dataContentType) {
this(source, type);
this.setDataBase64(data, dataContentType);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}.
* @throws IllegalArgumentException if cloudEventJsonString isn't a JSON string for a cloud event or an array of it.
* @throws NullPointerException if cloudEventJsonString is {@code null}.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
if (CoreUtils.isNullOrEmpty(id)) {
throw new IllegalArgumentException("id cannot be null or empty");
}
this.cloudEvent.setId(id);
return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
if (cloudEvent.getDataBase64() != null) {
return BinaryData.fromBytes(cloudEvent.getDataBase64());
}
return EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
if (data != null) {
byte[] encoded = Base64.getEncoder().encode(data);
this.cloudEvent.setDataBase64(encoded);
this.cloudEvent.setDatacontenttype(dataContentType);
}
return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} |
Both data and dataContentType can be null. | public CloudEvent(String source, String type, byte[] data, String dataContentType) {
this(source, type);
this.setDataBase64(data, dataContentType);
} | this(source, type); | public CloudEvent(String source, String type, byte[] data, String dataContentType) {
this(source, type);
this.setDataBase64(data, dataContentType);
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
*/
private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Source cannot be null or empty"));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("type cannot be null or empty"));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent}s.
* @throws IllegalArgumentException if the input parameter isn't a JSON string for a cloud event or an array of it.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
if (CoreUtils.isNullOrEmpty(id)) {
throw new IllegalArgumentException("id cannot be null or empty");
}
this.cloudEvent.setId(id);
return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
if (cloudEvent.getDataBase64() != null) {
return BinaryData.fromBytes(cloudEvent.getDataBase64());
}
return EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
if (data != null) {
byte[] encoded = Base64.getEncoder().encode(data);
this.cloudEvent.setDataBase64(encoded);
this.cloudEvent.setDatacontenttype(dataContentType);
}
return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
* @throws NullPointerException if source or type is {@code null}.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
* @throws NullPointerException if source or type is {@code null}.
*/
private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'source' cannot be null or empty."));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'type' cannot be null or empty."));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}.
* @throws IllegalArgumentException if cloudEventJsonString isn't a JSON string for a cloud event or an array of it.
* @throws NullPointerException if cloudEventJsonString is {@code null}.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
if (CoreUtils.isNullOrEmpty(id)) {
throw new IllegalArgumentException("id cannot be null or empty");
}
this.cloudEvent.setId(id);
return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
if (cloudEvent.getDataBase64() != null) {
return BinaryData.fromBytes(cloudEvent.getDataBase64());
}
return EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
if (data != null) {
byte[] encoded = Base64.getEncoder().encode(data);
this.cloudEvent.setDataBase64(encoded);
this.cloudEvent.setDatacontenttype(dataContentType);
}
return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} |
I changed to 'type' because the param name is type. | private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Source cannot be null or empty"));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("type cannot be null or empty"));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
} | throw logger.logExceptionAsError(new IllegalArgumentException("type cannot be null or empty")); | private CloudEvent(String source, String type) {
if (CoreUtils.isNullOrEmpty(source)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'source' cannot be null or empty."));
}
if (CoreUtils.isNullOrEmpty(type)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'type' cannot be null or empty."));
}
this.cloudEvent = new com.azure.messaging.eventgrid.implementation.models.CloudEvent()
.setId(UUID.randomUUID().toString())
.setSource(source)
.setType(type)
.setSpecversion(SPEC_VERSION);
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
*/
public CloudEvent(String source, String type, byte[] data, String dataContentType) {
// Delegates validation of 'source' and 'type' to the private constructor.
this(source, type);
// Stores the payload Base64-encoded; a null 'data' leaves the payload and content type unset.
this.setDataBase64(data, dataContentType);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent}s.
* @throws IllegalArgumentException if the input parameter isn't a JSON string for a cloud event or an array of it.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
    if (CoreUtils.isNullOrEmpty(id)) {
        // Log-and-throw via the ClientLogger, consistent with the argument checks
        // in this class's constructor (which use logger.logExceptionAsError).
        throw logger.logExceptionAsError(new IllegalArgumentException("id cannot be null or empty"));
    }
    this.cloudEvent.setId(id);
    return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
    // Binary payloads live in data_base64; structured payloads in data.
    byte[] base64Payload = cloudEvent.getDataBase64();
    return base64Payload != null
        ? BinaryData.fromBytes(base64Payload)
        : EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
    // A null payload is a no-op: neither the data nor the content type is recorded.
    if (data == null) {
        return this;
    }
    this.cloudEvent.setDataBase64(Base64.getEncoder().encode(data));
    this.cloudEvent.setDatacontenttype(dataContentType);
    return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
// NOTE(review): returns null (not an empty map) when no extension attribute was ever added.
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
// Unmodifiable view; callers must use addExtensionAttribute to mutate.
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} | class CloudEvent {
private static final String SPEC_VERSION = "1.0";
private final com.azure.messaging.eventgrid.implementation.models.CloudEvent cloudEvent;
private static final ClientLogger logger = new ClientLogger(CloudEvent.class);
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event. It can't be null or empty.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived". It can't be null or empty.
* @param data the payload of this event. Set to null if your event doesn't have the data payload.
* It will be serialized as a String if it's a String, or application/json if it's not a String.
* @throws NullPointerException if source or type is {@code null}.
*/
public CloudEvent(String source, String type, Object data) {
this(source, type);
this.setData(data);
}
/**
* Create an instance of CloudEvent. The source and type are required fields to publish.
* @param source a URI identifying the origin of the event.
* @param type the type of event, e.g. "Contoso.Items.ItemReceived".
* @param data the payload in bytes of this event. It will be serialized to Base64 format.
* @param dataContentType the type of the data.
* @throws NullPointerException if source or type is {@code null}.
*/
public CloudEvent(String source, String type, byte[] data, String dataContentType) {
this(source, type);
this.setDataBase64(data, dataContentType);
}
/**
* Deserialize the {@link CloudEvent} from a JSON string.
* @param cloudEventJsonString the JSON payload containing one or more events.
*
* @return all of the events in the payload deserialized as {@link CloudEvent CloudEvents}.
* @throws IllegalArgumentException if cloudEventJsonString isn't a JSON string for a cloud event or an array of it.
* @throws NullPointerException if cloudEventJsonString is {@code null}.
*/
public static List<CloudEvent> fromString(String cloudEventJsonString) {
return EventGridDeserializer.deserializeCloudEvents(cloudEventJsonString);
}
/**
* Get the id of the cloud event.
* @return the id.
*/
public String getId() {
return this.cloudEvent.getId();
}
/**
* Set a custom id. Note that a random id is already set by default.
* @param id the id to set.
*
* @return the cloud event itself.
*/
public CloudEvent setId(String id) {
    if (CoreUtils.isNullOrEmpty(id)) {
        // Log-and-throw via the ClientLogger, consistent with the argument checks
        // in this class's constructor (which use logger.logExceptionAsError).
        throw logger.logExceptionAsError(new IllegalArgumentException("id cannot be null or empty"));
    }
    this.cloudEvent.setId(id);
    return this;
}
/**
* Get the URI source of the event.
* @return the source.
*/
public String getSource() {
return this.cloudEvent.getSource();
}
/**
* Get the data associated with this event as a {@link BinaryData}, which has API to deserialize the data into
* a String, an Object, or a byte[].
* @return A {@link BinaryData} that wraps the this event's data payload.
*/
public BinaryData getData() {
if (cloudEvent.getDataBase64() != null) {
return BinaryData.fromBytes(cloudEvent.getDataBase64());
}
return EventGridDeserializer.getData(cloudEvent.getData());
}
/**
* Set the data associated with this event.
* @param data the data to set.
*
* @return the cloud event itself.
*/
CloudEvent setData(Object data) {
this.cloudEvent.setData(data);
return this;
}
/**
* Set the Base64 data associated with this event.
* @param data the data to set.
* @param dataContentType the data content type of the CloudEvent.
*
* @return the cloud event itself.
*/
private CloudEvent setDataBase64(byte[] data, String dataContentType) {
if (data != null) {
byte[] encoded = Base64.getEncoder().encode(data);
this.cloudEvent.setDataBase64(encoded);
this.cloudEvent.setDatacontenttype(dataContentType);
}
return this;
}
/**
* Set the data content type with this event.
* @param dataContentType the data content type to set.
* @return the cloud event itself.
*/
public CloudEvent setDataContentType(String dataContentType) {
this.cloudEvent.setDatacontenttype(dataContentType);
return this;
}
/**
* Get the type of event, e.g. "Contoso.Items.ItemReceived".
* @return the type of the event.
*/
public String getType() {
return this.cloudEvent.getType();
}
/**
* Get the time associated with the occurrence of the event.
* @return the event time, or null if the time is not set.
*/
public OffsetDateTime getTime() {
return this.cloudEvent.getTime();
}
/**
* Set the time associated with the occurrence of the event.
* @param time the time to set.
*
* @return the cloud event itself.
*/
public CloudEvent setTime(OffsetDateTime time) {
this.cloudEvent.setTime(time);
return this;
}
/**
* Get the content MIME type that the data is in. A null value indicates that the data is either nonexistent or in the
* "application/json" type. Note that "application/json" is still a possible value for this field.
* @return the content type the data is in, or null if the data is nonexistent or in "application/json" format.
*/
public String getDataContentType() {
return this.cloudEvent.getDatacontenttype();
}
/**
* Get the schema that the data adheres to.
* @return a URI of the data schema, or null if it is not set.
*/
public String getDataSchema() {
return this.cloudEvent.getDataschema();
}
/**
* Set the schema that the data adheres to.
* @param dataSchema a URI identifying the schema of the data.
*
* @return the cloud event itself.
*/
public CloudEvent setDataSchema(String dataSchema) {
this.cloudEvent.setDataschema(dataSchema);
return this;
}
/**
* Get the subject associated with this event.
* @return the subject, or null if the subject was not set.
*/
public String getSubject() {
return this.cloudEvent.getSubject();
}
/**
* Set the subject of the event.
* @param subject the subject to set.
*
* @return the cloud event itself.
*/
public CloudEvent setSubject(String subject) {
this.cloudEvent.setSubject(subject);
return this;
}
/**
* Get a map of the additional user-defined attributes associated with this event.
* @return the extension attributes as an unmodifiable map.
*/
public Map<String, Object> getExtensionAttributes() {
if (this.cloudEvent.getAdditionalProperties() == null) {
return null;
}
return Collections.unmodifiableMap(this.cloudEvent.getAdditionalProperties());
}
/**
* Add/Overwrite a single extension attribute to the cloud event. The property name will be transformed
* to lowercase and must not share a name with any reserved cloud event properties.
* @param name the name of the attribute.
* @param value the value to associate with the name.
*
* @return the cloud event itself.
*/
public CloudEvent addExtensionAttribute(String name, Object value) {
if (this.cloudEvent.getAdditionalProperties() == null) {
this.cloudEvent.setAdditionalProperties(new HashMap<>());
}
this.cloudEvent.getAdditionalProperties().put(name.toLowerCase(Locale.ENGLISH), value);
return this;
}
CloudEvent(com.azure.messaging.eventgrid.implementation.models.CloudEvent impl) {
this.cloudEvent = impl;
}
com.azure.messaging.eventgrid.implementation.models.CloudEvent toImpl() {
return this.cloudEvent;
}
} |
It's unfortunate that Jackson doesn't consider this as a breaking change. | public JacksonAdapter() {
this.simpleMapper = initializeMapperBuilder(JsonMapper.builder())
.build();
this.headerMapper = initializeMapperBuilder(JsonMapper.builder())
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES)
.build();
this.xmlMapper = initializeMapperBuilder(XmlMapper.builder())
.defaultUseWrapper(false)
.enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
/*
* In Jackson 2.12 the default value of this feature changed from true to false.
* https:
*/
.enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL)
.build();
ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
this.mapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper))
.addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper))
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
} | * https: | public JacksonAdapter() {
this.simpleMapper = initializeMapperBuilder(JsonMapper.builder())
.build();
this.headerMapper = initializeMapperBuilder(JsonMapper.builder())
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES)
.build();
this.xmlMapper = initializeMapperBuilder(XmlMapper.builder())
.defaultUseWrapper(false)
.enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
/*
* In Jackson 2.12 the default value of this feature changed from true to false.
* https:
*/
.enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL)
.build();
this.xmlMapper.coercionConfigDefaults()
.setCoercion(CoercionInputShape.EmptyString, CoercionAction.AsNull);
ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
this.mapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper))
.addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper))
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
} | class JacksonAdapter implements SerializerAdapter {
private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$");
private final ClientLogger logger = new ClientLogger(JacksonAdapter.class);
/**
* An instance of {@link ObjectMapper} to serialize/deserialize objects.
*/
private final ObjectMapper mapper;
/**
* An instance of {@link ObjectMapper} that does not do flattening.
*/
private final ObjectMapper simpleMapper;
private final ObjectMapper xmlMapper;
private final ObjectMapper headerMapper;
/*
* The lazily-created serializer for this ServiceClient.
*/
private static SerializerAdapter serializerAdapter;
private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>();
/**
* Creates a new JacksonAdapter instance with default mapper settings.
*/
/**
* Gets a static instance of {@link ObjectMapper} that doesn't handle flattening.
*
* @return an instance of {@link ObjectMapper}.
*/
protected ObjectMapper simpleMapper() {
return simpleMapper;
}
/**
* maintain singleton instance of the default serializer adapter.
*
* @return the default serializer
*/
public static synchronized SerializerAdapter createDefaultSerializerAdapter() {
// Synchronized lazy initialization: at most one shared JacksonAdapter is ever created.
if (serializerAdapter == null) {
serializerAdapter = new JacksonAdapter();
}
return serializerAdapter;
}
/**
* @return the original serializer type
*/
public ObjectMapper serializer() {
return mapper;
}
@Override
public String serialize(Object object, SerializerEncoding encoding) throws IOException {
if (object == null) {
return null;
}
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serialize(object, encoding, stream);
return new String(stream.toByteArray(), 0, stream.size(), StandardCharsets.UTF_8);
}
@Override
public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException {
    // Serializing a null object writes nothing to the stream.
    if (object == null) {
        return;
    }
    // Choose the mapper matching the wire format; everything non-XML is handled as JSON.
    ObjectMapper selected = (encoding == SerializerEncoding.XML) ? xmlMapper : serializer();
    selected.writeValue(outputStream, object);
}
@Override
public String serializeRaw(Object object) {
if (object == null) {
return null;
}
try {
// Serialize to JSON, then strip leading/trailing double quotes (PATTERN = ^\"*|\"*$)
// so plain values such as Strings are rendered without surrounding quotes.
return PATTERN.matcher(serialize(object, SerializerEncoding.JSON)).replaceAll("");
} catch (IOException ex) {
// Best-effort API: serialization failures are logged and reported as null, not thrown.
logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex);
return null;
}
}
@Override
public String serializeList(List<?> list, CollectionFormat format) {
    if (list == null) {
        return null;
    }
    // Join the raw serialized elements with the format's delimiter; null serializations become "".
    StringBuilder joined = new StringBuilder();
    String delimiter = format.getDelimiter();
    boolean first = true;
    for (Object element : list) {
        if (!first) {
            joined.append(delimiter);
        }
        String raw = serializeRaw(element);
        joined.append(raw == null ? "" : raw);
        first = false;
    }
    return joined.toString();
}
@Override
public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException {
// A null or empty payload deserializes to null rather than failing.
if (CoreUtils.isNullOrEmpty(value)) {
return null;
}
// Delegate to the InputStream overload, decoding the payload as UTF-8 bytes.
return deserialize(new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)), type, encoding);
}
// Deserializes from a stream; XML payloads use the XML mapper, everything else the JSON mapper.
@Override
public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding)
throws IOException {
// A null stream deserializes to null rather than failing.
if (inputStream == null) {
return null;
}
final JavaType javaType = createJavaType(type);
try {
if (encoding == SerializerEncoding.XML) {
return xmlMapper.readValue(inputStream, javaType);
} else {
return serializer().readValue(inputStream, javaType);
}
} catch (JsonParseException jpe) {
// Surface malformed payloads as MalformedValueException, preserving the parse cause.
throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe));
}
}
// Maps HTTP headers onto a typed headers object, then gathers @HeaderCollection-annotated
// Map<String, String> fields by header-name prefix and injects the collected values.
@Override
public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException {
// A null target type means no header deserialization was requested.
if (deserializedHeadersType == null) {
return null;
}
/*
 * Do we need to serialize and then deserialize the headers? For now transition to using convertValue as it
 * allows for some internal optimizations by Jackson.
 */
T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType));
final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType);
final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields();
/*
 * A list containing all handlers for header collections of the header type.
 */
final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>();
/*
 * This set is an optimization where we track the first character of all HeaderCollections defined on the
 * deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than
 * getting the first character.
 */
final Set<Character> headerCollectionsFirstCharacters = new HashSet<>();
/*
 * Begin by looping over all declared fields and initializing all header collection information.
 */
for (final Field declaredField : declaredFields) {
// Only fields annotated with @HeaderCollection participate.
if (!declaredField.isAnnotationPresent(HeaderCollection.class)) {
continue;
}
final Type declaredFieldType = declaredField.getGenericType();
// The annotated field must be a Map; anything else is silently skipped.
if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) {
continue;
}
final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType);
// Specifically a Map<String, String>; other generic signatures are skipped.
if (mapTypeArguments.length != 2
|| mapTypeArguments[0] != String.class
|| mapTypeArguments[1] != String.class) {
continue;
}
final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class);
final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT);
final int headerCollectionPrefixLength = headerCollectionPrefix.length();
// An empty prefix would match every header, so it is ignored.
if (headerCollectionPrefixLength == 0) {
continue;
}
headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField));
headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0));
}
/*
 * Then loop over all headers and check if they begin with any of the prefixes found.
 */
for (final HttpHeader header : headers) {
// Header-name matching is case-insensitive; compare in lower case.
String headerNameLower = header.getName().toLowerCase(Locale.ROOT);
for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) {
if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) {
continue;
}
if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) {
headerCollectionHandler.addHeader(header.getName(), header.getValue());
}
}
}
/*
 * Finally inject all found header collection values into the deserialized headers.
 */
headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger));
return deserializedHeaders;
}
/*
 * Applies the configuration shared by every mapper this adapter creates: lenient deserialization,
 * non-timestamp (java.time) date handling, non-null-only serialization, the azure-core
 * (de)serializer modules, and field-based (rather than accessor-based) property discovery.
 */
private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) {
mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)
.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)
.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.serializationInclusion(JsonInclude.Include.NON_NULL)
.addModule(new JavaTimeModule())
.addModule(ByteArraySerializer.getModule())
.addModule(Base64UrlSerializer.getModule())
.addModule(DateTimeSerializer.getModule())
.addModule(DateTimeDeserializer.getModule())
.addModule(DateTimeRfc1123Serializer.getModule())
.addModule(DurationSerializer.getModule())
.addModule(HttpHeadersSerializer.getModule())
.addModule(UnixTimeSerializer.getModule())
.visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);
return mapper;
}
/*
 * Converts the given Type into a Jackson JavaType, caching the result.
 *
 * BUGFIX: the previous implementation resolved parameterized type arguments recursively from
 * inside ConcurrentHashMap.computeIfAbsent. Recursive updates during computeIfAbsent are
 * explicitly unsupported and may throw IllegalStateException or block indefinitely, so the
 * JavaType is now built first and cached with putIfAbsent.
 */
private JavaType createJavaType(Type type) {
    if (type == null) {
        return null;
    } else if (type instanceof JavaType) {
        // Already a Jackson type; no conversion or caching needed.
        return (JavaType) type;
    }
    JavaType cachedType = typeToJavaTypeCache.get(type);
    if (cachedType != null) {
        return cachedType;
    }
    final JavaType createdType;
    if (type instanceof ParameterizedType) {
        final ParameterizedType parameterizedType = (ParameterizedType) type;
        final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
        JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length];
        for (int i = 0; i != actualTypeArguments.length; i++) {
            // Recursively convert each type argument (safe now that we're outside computeIfAbsent).
            javaTypeArguments[i] = createJavaType(actualTypeArguments[i]);
        }
        createdType = mapper.getTypeFactory()
            .constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments);
    } else {
        createdType = mapper.getTypeFactory().constructType(type);
    }
    // putIfAbsent keeps the first value on a race so all callers observe a single instance.
    JavaType existingType = typeToJavaTypeCache.putIfAbsent(type, createdType);
    return existingType == null ? createdType : existingType;
}
/*
* Internal helper class that helps manage converting headers into their header collection.
*/
private static final class HeaderCollectionHandler {
    // Lower-cased header-name prefix identifying members of this collection.
    private final String prefix;
    private final int prefixLength;
    // Collected values, keyed by the header name with the prefix stripped.
    private final Map<String, String> values;
    // The @HeaderCollection-annotated field the values are injected into.
    private final Field declaringField;
    HeaderCollectionHandler(String prefix, Field declaringField) {
        this.prefix = prefix;
        this.prefixLength = prefix.length();
        this.values = new HashMap<>();
        this.declaringField = declaringField;
    }
    // True when the (already lower-cased) header name belongs to this collection.
    boolean headerStartsWithPrefix(String headerName) {
        return headerName.startsWith(prefix);
    }
    // Records a matching header under its name minus the collection prefix.
    void addHeader(String headerName, String headerValue) {
        values.put(headerName.substring(prefixLength), headerValue);
    }
    // Injects the collected values, preferring a public setter over reflective field access.
    void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
        /*
         * First check if the deserialized headers type has a public setter.
         */
        if (usePublicSetter(deserializedHeaders, logger)) {
            return;
        }
        logger.verbose("Failed to find or use public setter to set header collection.");
        /*
         * Otherwise fallback to setting the field directly.
         */
        final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
        try {
            if (!declaredFieldAccessibleBackup) {
                AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
                    declaringField.setAccessible(true);
                    return null;
                });
            }
            declaringField.set(deserializedHeaders, values);
            logger.verbose("Set header collection by accessing the field directly.");
        } catch (IllegalAccessException ex) {
            logger.warning("Failed to inject header collection values into deserialized headers.", ex);
        } finally {
            // Restore the original accessibility so the field's reflective state is unchanged.
            if (!declaredFieldAccessibleBackup) {
                AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
                    declaringField.setAccessible(false);
                    return null;
                });
            }
        }
    }
    private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
        try {
            String potentialSetterName = getPotentialSetterName();
            Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
            if (Modifier.isPublic(setterMethod.getModifiers())) {
                setterMethod.invoke(deserializedHeaders, values);
                // BUGFIX: the message previously read "User setter %s on class %s ...". Log calls in
                // this file substitute '{}' placeholders (see the warning in serializeRaw), so the
                // printf-style '%s' arguments were never rendered; also fixed the "User"/"Used" typo.
                logger.verbose("Used setter {} on class {} to set header collection.", potentialSetterName,
                    deserializedHeaders.getClass().getSimpleName());
                return true;
            }
            return false;
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
            // No accessible matching setter; the caller falls back to direct field injection.
            return false;
        }
    }
    // Derives the conventional setter name ("setFieldName") for the declaring field.
    private String getPotentialSetterName() {
        String fieldName = declaringField.getName();
        return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
    }
}
} | class JacksonAdapter implements SerializerAdapter {
private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$");
private final ClientLogger logger = new ClientLogger(JacksonAdapter.class);
/**
* An instance of {@link ObjectMapper} to serialize/deserialize objects.
*/
private final ObjectMapper mapper;
/**
* An instance of {@link ObjectMapper} that does not do flattening.
*/
private final ObjectMapper simpleMapper;
private final ObjectMapper xmlMapper;
private final ObjectMapper headerMapper;
/*
* The lazily-created serializer for this ServiceClient.
*/
private static SerializerAdapter serializerAdapter;
private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>();
/**
* Creates a new JacksonAdapter instance with default mapper settings.
*/
/**
* Gets a static instance of {@link ObjectMapper} that doesn't handle flattening.
*
* @return an instance of {@link ObjectMapper}.
*/
protected ObjectMapper simpleMapper() {
return simpleMapper;
}
/**
* maintain singleton instance of the default serializer adapter.
*
* @return the default serializer
*/
public static synchronized SerializerAdapter createDefaultSerializerAdapter() {
if (serializerAdapter == null) {
serializerAdapter = new JacksonAdapter();
}
return serializerAdapter;
}
/**
* @return the original serializer type
*/
public ObjectMapper serializer() {
return mapper;
}
@Override
public String serialize(Object object, SerializerEncoding encoding) throws IOException {
if (object == null) {
return null;
}
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serialize(object, encoding, stream);
return new String(stream.toByteArray(), 0, stream.size(), StandardCharsets.UTF_8);
}
@Override
public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException {
if (object == null) {
return;
}
if ((encoding == SerializerEncoding.XML)) {
xmlMapper.writeValue(outputStream, object);
} else {
serializer().writeValue(outputStream, object);
}
}
@Override
public String serializeRaw(Object object) {
if (object == null) {
return null;
}
try {
return PATTERN.matcher(serialize(object, SerializerEncoding.JSON)).replaceAll("");
} catch (IOException ex) {
logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex);
return null;
}
}
@Override
public String serializeList(List<?> list, CollectionFormat format) {
if (list == null) {
return null;
}
List<String> serialized = new ArrayList<>();
for (Object element : list) {
String raw = serializeRaw(element);
serialized.add(raw != null ? raw : "");
}
return String.join(format.getDelimiter(), serialized);
}
@Override
public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException {
if (CoreUtils.isNullOrEmpty(value)) {
return null;
}
return deserialize(new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)), type, encoding);
}
@Override
public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding)
throws IOException {
if (inputStream == null) {
return null;
}
final JavaType javaType = createJavaType(type);
try {
if (encoding == SerializerEncoding.XML) {
return xmlMapper.readValue(inputStream, javaType);
} else {
return serializer().readValue(inputStream, javaType);
}
} catch (JsonParseException jpe) {
throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe));
}
}
@Override
public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException {
if (deserializedHeadersType == null) {
return null;
}
T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType));
final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType);
final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields();
/*
* A list containing all handlers for header collections of the header type.
*/
final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>();
/*
* This set is an optimization where we track the first character of all HeaderCollections defined on the
* deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than
* getting the first character.
*/
final Set<Character> headerCollectionsFirstCharacters = new HashSet<>();
/*
* Begin by looping over all declared fields and initializing all header collection information.
*/
for (final Field declaredField : declaredFields) {
if (!declaredField.isAnnotationPresent(HeaderCollection.class)) {
continue;
}
final Type declaredFieldType = declaredField.getGenericType();
if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) {
continue;
}
final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType);
if (mapTypeArguments.length != 2
|| mapTypeArguments[0] != String.class
|| mapTypeArguments[1] != String.class) {
continue;
}
final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class);
final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT);
final int headerCollectionPrefixLength = headerCollectionPrefix.length();
if (headerCollectionPrefixLength == 0) {
continue;
}
headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField));
headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0));
}
/*
* Then loop over all headers and check if they begin with any of the prefixes found.
*/
for (final HttpHeader header : headers) {
String headerNameLower = header.getName().toLowerCase(Locale.ROOT);
/*
* Optimization to skip this header as it doesn't begin with any character starting header collections in
* the deserialized headers type.
*/
if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) {
continue;
}
for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) {
if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) {
headerCollectionHandler.addHeader(header.getName(), header.getValue());
}
}
}
/*
* Finally inject all found header collection values into the deserialized headers.
*/
headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger));
return deserializedHeaders;
}
@SuppressWarnings("deprecation")
private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) {
mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)
.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)
.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.serializationInclusion(JsonInclude.Include.NON_NULL)
.addModule(new JavaTimeModule())
.addModule(ByteArraySerializer.getModule())
.addModule(Base64UrlSerializer.getModule())
.addModule(DateTimeSerializer.getModule())
.addModule(DateTimeDeserializer.getModule())
.addModule(DateTimeRfc1123Serializer.getModule())
.addModule(DurationSerializer.getModule())
.addModule(HttpHeadersSerializer.getModule())
.addModule(UnixTimeSerializer.getModule())
.visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);
return mapper;
}
private JavaType createJavaType(Type type) {
if (type == null) {
return null;
} else if (type instanceof JavaType) {
return (JavaType) type;
} else if (type instanceof ParameterizedType) {
final ParameterizedType parameterizedType = (ParameterizedType) type;
final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length];
for (int i = 0; i != actualTypeArguments.length; i++) {
javaTypeArguments[i] = createJavaType(actualTypeArguments[i]);
}
return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory()
.constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments));
} else {
return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory().constructType(t));
}
}
/*
* Internal helper class that helps manage converting headers into their header collection.
*/
private static final class HeaderCollectionHandler {
private final String prefix;
private final int prefixLength;
private final Map<String, String> values;
private final Field declaringField;
HeaderCollectionHandler(String prefix, Field declaringField) {
this.prefix = prefix;
this.prefixLength = prefix.length();
this.values = new HashMap<>();
this.declaringField = declaringField;
}
boolean headerStartsWithPrefix(String headerName) {
return headerName.startsWith(prefix);
}
void addHeader(String headerName, String headerValue) {
values.put(headerName.substring(prefixLength), headerValue);
}
@SuppressWarnings("deprecation")
void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
/*
* First check if the deserialized headers type has a public setter.
*/
if (usePublicSetter(deserializedHeaders, logger)) {
return;
}
logger.verbose("Failed to find or use public setter to set header collection.");
/*
* Otherwise fallback to setting the field directly.
*/
final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
try {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(true);
return null;
});
}
declaringField.set(deserializedHeaders, values);
logger.verbose("Set header collection by accessing the field directly.");
} catch (IllegalAccessException ex) {
logger.warning("Failed to inject header collection values into deserialized headers.", ex);
} finally {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(false);
return null;
});
}
}
}
private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
try {
String potentialSetterName = getPotentialSetterName();
Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
if (Modifier.isPublic(setterMethod.getModifiers())) {
setterMethod.invoke(deserializedHeaders, values);
logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
}
}
private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
}
}
} |
Is this guaranteed to be non-null and non-empty? | private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
} | String fieldName = declaringField.getName(); | private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
} | class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
} | class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
} |
Should there be a check for `headerName.length() >= prefixLength`? | void addHeader(String headerName, String headerValue) {
values.put(headerName.substring(prefixLength), headerValue);
} | values.put(headerName.substring(prefixLength), headerValue); | void addHeader(String headerName, String headerValue) {
values.put(headerName.substring(prefixLength), headerValue);
} | class HeaderCollectionHandler {
private final String prefix;
private final int prefixLength;
private final Map<String, String> values;
private final Field declaringField;
HeaderCollectionHandler(String prefix, Field declaringField) {
this.prefix = prefix;
this.prefixLength = prefix.length();
this.values = new HashMap<>();
this.declaringField = declaringField;
}
boolean headerStartsWithPrefix(String headerName) {
return headerName.startsWith(prefix);
}
void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
/*
* First check if the deserialized headers type has a public setter.
*/
if (usePublicSetter(deserializedHeaders, logger)) {
return;
}
logger.verbose("Failed to find or use public setter to set header collection.");
/*
* Otherwise fallback to setting the field directly.
*/
final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
try {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(true);
return null;
});
}
declaringField.set(deserializedHeaders, values);
logger.verbose("Set header collection by accessing the field directly.");
} catch (IllegalAccessException ex) {
logger.warning("Failed to inject header collection values into deserialized headers.", ex);
} finally {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(false);
return null;
});
}
}
}
private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
try {
String potentialSetterName = getPotentialSetterName();
Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
if (Modifier.isPublic(setterMethod.getModifiers())) {
setterMethod.invoke(deserializedHeaders, values);
logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
}
}
private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
}
} | class HeaderCollectionHandler {
private final String prefix;
private final int prefixLength;
private final Map<String, String> values;
private final Field declaringField;
HeaderCollectionHandler(String prefix, Field declaringField) {
this.prefix = prefix;
this.prefixLength = prefix.length();
this.values = new HashMap<>();
this.declaringField = declaringField;
}
boolean headerStartsWithPrefix(String headerName) {
return headerName.startsWith(prefix);
}
@SuppressWarnings("deprecation")
void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
/*
* First check if the deserialized headers type has a public setter.
*/
if (usePublicSetter(deserializedHeaders, logger)) {
return;
}
logger.verbose("Failed to find or use public setter to set header collection.");
/*
* Otherwise fallback to setting the field directly.
*/
final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
try {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(true);
return null;
});
}
declaringField.set(deserializedHeaders, values);
logger.verbose("Set header collection by accessing the field directly.");
} catch (IllegalAccessException ex) {
logger.warning("Failed to inject header collection values into deserialized headers.", ex);
} finally {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(false);
return null;
});
}
}
}
private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
try {
String potentialSetterName = getPotentialSetterName();
Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
if (Modifier.isPublic(setterMethod.getModifiers())) {
setterMethod.invoke(deserializedHeaders, values);
logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
}
}
private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
}
} |
Agreed, but at least it is called out and easily worked around. | public JacksonAdapter() {
this.simpleMapper = initializeMapperBuilder(JsonMapper.builder())
.build();
this.headerMapper = initializeMapperBuilder(JsonMapper.builder())
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES)
.build();
this.xmlMapper = initializeMapperBuilder(XmlMapper.builder())
.defaultUseWrapper(false)
.enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
/*
* In Jackson 2.12 the default value of this feature changed from true to false.
* https:
*/
.enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL)
.build();
ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
this.mapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper))
.addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper))
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
} | * https: | public JacksonAdapter() {
this.simpleMapper = initializeMapperBuilder(JsonMapper.builder())
.build();
this.headerMapper = initializeMapperBuilder(JsonMapper.builder())
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES)
.build();
this.xmlMapper = initializeMapperBuilder(XmlMapper.builder())
.defaultUseWrapper(false)
.enable(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
/*
* In Jackson 2.12 the default value of this feature changed from true to false.
* https:
*/
.enable(FromXmlParser.Feature.EMPTY_ELEMENT_AS_NULL)
.build();
this.xmlMapper.coercionConfigDefaults()
.setCoercion(CoercionInputShape.EmptyString, CoercionAction.AsNull);
ObjectMapper flatteningMapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
this.mapper = initializeMapperBuilder(JsonMapper.builder())
.addModule(AdditionalPropertiesSerializer.getModule(flatteningMapper))
.addModule(AdditionalPropertiesDeserializer.getModule(flatteningMapper))
.addModule(FlatteningSerializer.getModule(simpleMapper()))
.addModule(FlatteningDeserializer.getModule(simpleMapper()))
.build();
} | class JacksonAdapter implements SerializerAdapter {
private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$");
private final ClientLogger logger = new ClientLogger(JacksonAdapter.class);
/**
* An instance of {@link ObjectMapper} to serialize/deserialize objects.
*/
private final ObjectMapper mapper;
/**
* An instance of {@link ObjectMapper} that does not do flattening.
*/
private final ObjectMapper simpleMapper;
private final ObjectMapper xmlMapper;
private final ObjectMapper headerMapper;
/*
* The lazily-created serializer for this ServiceClient.
*/
private static SerializerAdapter serializerAdapter;
private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>();
/**
* Creates a new JacksonAdapter instance with default mapper settings.
*/
/**
* Gets a static instance of {@link ObjectMapper} that doesn't handle flattening.
*
* @return an instance of {@link ObjectMapper}.
*/
protected ObjectMapper simpleMapper() {
return simpleMapper;
}
/**
* maintain singleton instance of the default serializer adapter.
*
* @return the default serializer
*/
public static synchronized SerializerAdapter createDefaultSerializerAdapter() {
if (serializerAdapter == null) {
serializerAdapter = new JacksonAdapter();
}
return serializerAdapter;
}
/**
* @return the original serializer type
*/
public ObjectMapper serializer() {
return mapper;
}
@Override
public String serialize(Object object, SerializerEncoding encoding) throws IOException {
if (object == null) {
return null;
}
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serialize(object, encoding, stream);
return new String(stream.toByteArray(), 0, stream.size(), StandardCharsets.UTF_8);
}
@Override
public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException {
if (object == null) {
return;
}
if ((encoding == SerializerEncoding.XML)) {
xmlMapper.writeValue(outputStream, object);
} else {
serializer().writeValue(outputStream, object);
}
}
@Override
public String serializeRaw(Object object) {
if (object == null) {
return null;
}
try {
return PATTERN.matcher(serialize(object, SerializerEncoding.JSON)).replaceAll("");
} catch (IOException ex) {
logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex);
return null;
}
}
@Override
public String serializeList(List<?> list, CollectionFormat format) {
if (list == null) {
return null;
}
List<String> serialized = new ArrayList<>();
for (Object element : list) {
String raw = serializeRaw(element);
serialized.add(raw != null ? raw : "");
}
return String.join(format.getDelimiter(), serialized);
}
@Override
public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException {
if (CoreUtils.isNullOrEmpty(value)) {
return null;
}
return deserialize(new ByteArrayInputStream(value.getBytes(StandardCharsets.UTF_8)), type, encoding);
}
@Override
public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding)
throws IOException {
if (inputStream == null) {
return null;
}
final JavaType javaType = createJavaType(type);
try {
if (encoding == SerializerEncoding.XML) {
return xmlMapper.readValue(inputStream, javaType);
} else {
return serializer().readValue(inputStream, javaType);
}
} catch (JsonParseException jpe) {
throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe));
}
}
@Override
public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException {
if (deserializedHeadersType == null) {
return null;
}
/*
* Do we need to serialize and then deserialize the headers? For now transition to using convertValue as it
* allows for some internal optimizations by Jackson.
*/
T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType));
final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType);
final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields();
/*
* A list containing all handlers for header collections of the header type.
*/
final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>();
/*
* This set is an optimization where we track the first character of all HeaderCollections defined on the
* deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than
* getting the first character.
*/
final Set<Character> headerCollectionsFirstCharacters = new HashSet<>();
/*
* Begin by looping over all declared fields and initializing all header collection information.
*/
for (final Field declaredField : declaredFields) {
if (!declaredField.isAnnotationPresent(HeaderCollection.class)) {
continue;
}
final Type declaredFieldType = declaredField.getGenericType();
if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) {
continue;
}
final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType);
if (mapTypeArguments.length != 2
|| mapTypeArguments[0] != String.class
|| mapTypeArguments[1] != String.class) {
continue;
}
final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class);
final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT);
final int headerCollectionPrefixLength = headerCollectionPrefix.length();
if (headerCollectionPrefixLength == 0) {
continue;
}
headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField));
headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0));
}
/*
* Then loop over all headers and check if they begin with any of the prefixes found.
*/
for (final HttpHeader header : headers) {
String headerNameLower = header.getName().toLowerCase(Locale.ROOT);
for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) {
if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) {
continue;
}
if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) {
headerCollectionHandler.addHeader(header.getName(), header.getValue());
}
}
}
/*
* Finally inject all found header collection values into the deserialized headers.
*/
headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger));
return deserializedHeaders;
}
private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) {
mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)
.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)
.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.serializationInclusion(JsonInclude.Include.NON_NULL)
.addModule(new JavaTimeModule())
.addModule(ByteArraySerializer.getModule())
.addModule(Base64UrlSerializer.getModule())
.addModule(DateTimeSerializer.getModule())
.addModule(DateTimeDeserializer.getModule())
.addModule(DateTimeRfc1123Serializer.getModule())
.addModule(DurationSerializer.getModule())
.addModule(HttpHeadersSerializer.getModule())
.addModule(UnixTimeSerializer.getModule())
.visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE)
.visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);
return mapper;
}
private JavaType createJavaType(Type type) {
if (type == null) {
return null;
} else if (type instanceof JavaType) {
return (JavaType) type;
} else if (type instanceof ParameterizedType) {
return typeToJavaTypeCache.computeIfAbsent(type, t -> {
final ParameterizedType parameterizedType = (ParameterizedType) type;
final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length];
for (int i = 0; i != actualTypeArguments.length; i++) {
javaTypeArguments[i] = createJavaType(actualTypeArguments[i]);
}
return mapper.getTypeFactory().constructParametricType((Class<?>) parameterizedType.getRawType(),
javaTypeArguments);
});
} else {
return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory().constructType(t));
}
}
/*
* Internal helper class that helps manage converting headers into their header collection.
*/
private static final class HeaderCollectionHandler {
private final String prefix;
private final int prefixLength;
private final Map<String, String> values;
private final Field declaringField;
HeaderCollectionHandler(String prefix, Field declaringField) {
this.prefix = prefix;
this.prefixLength = prefix.length();
this.values = new HashMap<>();
this.declaringField = declaringField;
}
boolean headerStartsWithPrefix(String headerName) {
return headerName.startsWith(prefix);
}
void addHeader(String headerName, String headerValue) {
values.put(headerName.substring(prefixLength), headerValue);
}
void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
/*
* First check if the deserialized headers type has a public setter.
*/
if (usePublicSetter(deserializedHeaders, logger)) {
return;
}
logger.verbose("Failed to find or use public setter to set header collection.");
/*
* Otherwise fallback to setting the field directly.
*/
final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
try {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(true);
return null;
});
}
declaringField.set(deserializedHeaders, values);
logger.verbose("Set header collection by accessing the field directly.");
} catch (IllegalAccessException ex) {
logger.warning("Failed to inject header collection values into deserialized headers.", ex);
} finally {
if (!declaredFieldAccessibleBackup) {
AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
declaringField.setAccessible(false);
return null;
});
}
}
}
private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
try {
String potentialSetterName = getPotentialSetterName();
Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
if (Modifier.isPublic(setterMethod.getModifiers())) {
setterMethod.invoke(deserializedHeaders, values);
logger.verbose("User setter %s on class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
}
}
/**
 * Derives the conventional JavaBean setter name ("set" + capitalized field name)
 * for the declaring field.
 *
 * @return the candidate setter method name.
 */
private String getPotentialSetterName() {
    final String fieldName = declaringField.getName();
    return new StringBuilder(3 + fieldName.length())
        .append("set")
        .append(fieldName.substring(0, 1).toUpperCase(Locale.ROOT))
        .append(fieldName.substring(1))
        .toString();
}
}
} | class JacksonAdapter implements SerializerAdapter {
private static final Pattern PATTERN = Pattern.compile("^\"*|\"*$");
private final ClientLogger logger = new ClientLogger(JacksonAdapter.class);
/**
* An instance of {@link ObjectMapper} to serialize/deserialize objects.
*/
private final ObjectMapper mapper;
/**
* An instance of {@link ObjectMapper} that does not do flattening.
*/
private final ObjectMapper simpleMapper;
private final ObjectMapper xmlMapper;
private final ObjectMapper headerMapper;
/*
* The lazily-created serializer for this ServiceClient.
*/
private static SerializerAdapter serializerAdapter;
private final Map<Type, JavaType> typeToJavaTypeCache = new ConcurrentHashMap<>();
/**
* Creates a new JacksonAdapter instance with default mapper settings.
*/
/**
* Gets a static instance of {@link ObjectMapper} that doesn't handle flattening.
*
* @return an instance of {@link ObjectMapper}.
*/
protected ObjectMapper simpleMapper() {
    return this.simpleMapper;
}
/**
* maintain singleton instance of the default serializer adapter.
*
* @return the default serializer
*/
public static synchronized SerializerAdapter createDefaultSerializerAdapter() {
    // Lazily create the shared adapter; 'synchronized' guards the first initialization.
    if (serializerAdapter != null) {
        return serializerAdapter;
    }
    serializerAdapter = new JacksonAdapter();
    return serializerAdapter;
}
/**
* @return the original serializer type
*/
public ObjectMapper serializer() {
    return this.mapper;
}
@Override
public String serialize(Object object, SerializerEncoding encoding) throws IOException {
    if (object == null) {
        return null;
    }
    // Serialize into an in-memory buffer, then decode the bytes as UTF-8.
    final ByteArrayOutputStream buffer = new AccessibleByteArrayOutputStream();
    serialize(object, encoding, buffer);
    return new String(buffer.toByteArray(), 0, buffer.size(), StandardCharsets.UTF_8);
}
@Override
public void serialize(Object object, SerializerEncoding encoding, OutputStream outputStream) throws IOException {
    if (object == null) {
        return;
    }
    // XML goes through the dedicated XML mapper; everything else uses the default mapper.
    final ObjectMapper writer = (encoding == SerializerEncoding.XML) ? xmlMapper : serializer();
    writer.writeValue(outputStream, object);
}
@Override
public String serializeRaw(Object object) {
    if (object == null) {
        return null;
    }
    try {
        final String json = serialize(object, SerializerEncoding.JSON);
        // Strip the surrounding double quotes from the serialized form.
        return PATTERN.matcher(json).replaceAll("");
    } catch (IOException ex) {
        logger.warning("Failed to serialize {} to JSON.", object.getClass(), ex);
        return null;
    }
}
@Override
public String serializeList(List<?> list, CollectionFormat format) {
    if (list == null) {
        return null;
    }
    // Serialize each element, substituting an empty string when serialization yields null.
    final List<String> pieces = new ArrayList<>(list.size());
    for (Object element : list) {
        final String raw = serializeRaw(element);
        pieces.add(raw == null ? "" : raw);
    }
    return String.join(format.getDelimiter(), pieces);
}
@Override
public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException {
    if (CoreUtils.isNullOrEmpty(value)) {
        return null;
    }
    // Delegate to the stream-based overload using the UTF-8 bytes of the value.
    final byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
    return deserialize(new ByteArrayInputStream(bytes), type, encoding);
}
@Override
public <T> T deserialize(InputStream inputStream, final Type type, SerializerEncoding encoding)
    throws IOException {
    if (inputStream == null) {
        return null;
    }
    final JavaType javaType = createJavaType(type);
    try {
        // XML goes through the dedicated XML mapper; everything else uses the default mapper.
        final ObjectMapper reader = (encoding == SerializerEncoding.XML) ? xmlMapper : serializer();
        return reader.readValue(inputStream, javaType);
    } catch (JsonParseException jpe) {
        // Surface malformed payloads as the azure-core MalformedValueException.
        throw logger.logExceptionAsError(new MalformedValueException(jpe.getMessage(), jpe));
    }
}
/**
 * Deserializes {@link HttpHeaders} into the requested headers type, then populates any
 * {@code @HeaderCollection}-annotated {@code Map<String, String>} fields: headers whose
 * lower-cased names begin with a declared collection prefix are gathered and injected
 * into the corresponding field.
 *
 * @param headers the HTTP headers to deserialize.
 * @param deserializedHeadersType the target headers type; {@code null} short-circuits to {@code null}.
 * @param <T> the deserialized headers type.
 * @return the populated headers object, or {@code null} when no target type was given.
 * @throws IOException declared by the interface contract.
 */
@Override
public <T> T deserialize(HttpHeaders headers, Type deserializedHeadersType) throws IOException {
    if (deserializedHeadersType == null) {
        return null;
    }
    T deserializedHeaders = headerMapper.convertValue(headers, createJavaType(deserializedHeadersType));
    final Class<?> deserializedHeadersClass = TypeUtil.getRawClass(deserializedHeadersType);
    final Field[] declaredFields = deserializedHeadersClass.getDeclaredFields();
    /*
     * A list containing all handlers for header collections of the header type.
     */
    final List<HeaderCollectionHandler> headerCollectionHandlers = new ArrayList<>();
    /*
     * This set is an optimization where we track the first character of all HeaderCollections defined on the
     * deserialized headers type. This allows us to optimize away startWiths checks which are much more costly than
     * getting the first character.
     */
    final Set<Character> headerCollectionsFirstCharacters = new HashSet<>();
    /*
     * Begin by looping over all declared fields and initializing all header collection information.
     */
    for (final Field declaredField : declaredFields) {
        if (!declaredField.isAnnotationPresent(HeaderCollection.class)) {
            continue;
        }
        final Type declaredFieldType = declaredField.getGenericType();
        // Only Map-typed fields can act as header collections.
        if (!TypeUtil.isTypeOrSubTypeOf(declaredField.getType(), Map.class)) {
            continue;
        }
        // The map must be exactly Map<String, String>.
        final Type[] mapTypeArguments = TypeUtil.getTypeArguments(declaredFieldType);
        if (mapTypeArguments.length != 2
            || mapTypeArguments[0] != String.class
            || mapTypeArguments[1] != String.class) {
            continue;
        }
        final HeaderCollection headerCollectionAnnotation = declaredField.getAnnotation(HeaderCollection.class);
        final String headerCollectionPrefix = headerCollectionAnnotation.value().toLowerCase(Locale.ROOT);
        final int headerCollectionPrefixLength = headerCollectionPrefix.length();
        // An empty prefix would match every header; skip it.
        if (headerCollectionPrefixLength == 0) {
            continue;
        }
        headerCollectionHandlers.add(new HeaderCollectionHandler(headerCollectionPrefix, declaredField));
        headerCollectionsFirstCharacters.add(headerCollectionPrefix.charAt(0));
    }
    /*
     * Then loop over all headers and check if they begin with any of the prefixes found.
     */
    for (final HttpHeader header : headers) {
        String headerNameLower = header.getName().toLowerCase(Locale.ROOT);
        /*
         * Optimization to skip this header as it doesn't begin with any character starting header collections in
         * the deserialized headers type.
         */
        if (!headerCollectionsFirstCharacters.contains(headerNameLower.charAt(0))) {
            continue;
        }
        for (HeaderCollectionHandler headerCollectionHandler : headerCollectionHandlers) {
            if (headerCollectionHandler.headerStartsWithPrefix(headerNameLower)) {
                headerCollectionHandler.addHeader(header.getName(), header.getValue());
            }
        }
    }
    /*
     * Finally inject all found header collection values into the deserialized headers.
     */
    headerCollectionHandlers.forEach(h -> h.injectValuesIntoDeclaringField(deserializedHeaders, logger));
    return deserializedHeaders;
}
/**
 * Applies the shared serialization configuration to a Jackson {@link MapperBuilder}:
 * lenient deserialization settings, non-null-only inclusion, the azure-core
 * (de)serializer modules, and field-only property discovery.
 *
 * @param mapper the mapper builder to configure.
 * @param <S> the concrete builder type.
 * @return the same builder instance, configured.
 */
@SuppressWarnings("deprecation")
private static <S extends MapperBuilder<?, ?>> S initializeMapperBuilder(S mapper) {
    mapper.enable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)
        .enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)
        .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
        .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
        .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
        .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
        .serializationInclusion(JsonInclude.Include.NON_NULL)
        .addModule(new JavaTimeModule())
        .addModule(ByteArraySerializer.getModule())
        .addModule(Base64UrlSerializer.getModule())
        .addModule(DateTimeSerializer.getModule())
        .addModule(DateTimeDeserializer.getModule())
        .addModule(DateTimeRfc1123Serializer.getModule())
        .addModule(DurationSerializer.getModule())
        .addModule(HttpHeadersSerializer.getModule())
        .addModule(UnixTimeSerializer.getModule())
        // Discover properties through fields only; getters/setters/is-getters are ignored.
        .visibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
        .visibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE)
        .visibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE)
        .visibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);
    return mapper;
}
/**
 * Converts a reflective {@link Type} into a Jackson {@link JavaType}, memoizing results in
 * {@code typeToJavaTypeCache}.
 *
 * @param type the type to convert; may be {@code null}.
 * @return the corresponding {@link JavaType}, or {@code null} when {@code type} is {@code null}.
 */
private JavaType createJavaType(Type type) {
    if (type == null) {
        return null;
    } else if (type instanceof JavaType) {
        // Already a Jackson type; no conversion needed.
        return (JavaType) type;
    } else if (type instanceof ParameterizedType) {
        final ParameterizedType parameterizedType = (ParameterizedType) type;
        final Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
        JavaType[] javaTypeArguments = new JavaType[actualTypeArguments.length];
        // Recursively convert each type argument before constructing the parametric type.
        for (int i = 0; i != actualTypeArguments.length; i++) {
            javaTypeArguments[i] = createJavaType(actualTypeArguments[i]);
        }
        return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory()
            .constructParametricType((Class<?>) parameterizedType.getRawType(), javaTypeArguments));
    } else {
        return typeToJavaTypeCache.computeIfAbsent(type, t -> mapper.getTypeFactory().constructType(t));
    }
}
/*
* Internal helper class that helps manage converting headers into their header collection.
*/
private static final class HeaderCollectionHandler {
    // Lower-cased header-collection prefix declared by the @HeaderCollection annotation.
    private final String prefix;
    private final int prefixLength;
    // Collected header values, keyed by header name with the prefix stripped.
    private final Map<String, String> values;
    // The Map<String, String> field on the deserialized headers type to populate.
    private final Field declaringField;

    HeaderCollectionHandler(String prefix, Field declaringField) {
        this.prefix = prefix;
        this.prefixLength = prefix.length();
        this.values = new HashMap<>();
        this.declaringField = declaringField;
    }

    // True when the (lower-cased) header name belongs to this header collection.
    boolean headerStartsWithPrefix(String headerName) {
        return headerName.startsWith(prefix);
    }

    // Records a header value keyed by its name minus the collection prefix.
    void addHeader(String headerName, String headerValue) {
        values.put(headerName.substring(prefixLength), headerValue);
    }

    /**
     * Injects the collected values into the declaring field, preferring a public setter and
     * falling back to direct (possibly privileged) field access.
     */
    @SuppressWarnings("deprecation")
    void injectValuesIntoDeclaringField(Object deserializedHeaders, ClientLogger logger) {
        /*
         * First check if the deserialized headers type has a public setter.
         */
        if (usePublicSetter(deserializedHeaders, logger)) {
            return;
        }
        logger.verbose("Failed to find or use public setter to set header collection.");
        /*
         * Otherwise fallback to setting the field directly.
         */
        final boolean declaredFieldAccessibleBackup = declaringField.isAccessible();
        try {
            if (!declaredFieldAccessibleBackup) {
                // doPrivileged so the accessibility change is allowed under a SecurityManager.
                AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
                    declaringField.setAccessible(true);
                    return null;
                });
            }
            declaringField.set(deserializedHeaders, values);
            logger.verbose("Set header collection by accessing the field directly.");
        } catch (IllegalAccessException ex) {
            logger.warning("Failed to inject header collection values into deserialized headers.", ex);
        } finally {
            if (!declaredFieldAccessibleBackup) {
                AccessController.doPrivileged((PrivilegedAction<Object>) () -> {
                    declaringField.setAccessible(false);
                    return null;
                });
            }
        }
    }

    private boolean usePublicSetter(Object deserializedHeaders, ClientLogger logger) {
        try {
            String potentialSetterName = getPotentialSetterName();
            Method setterMethod = deserializedHeaders.getClass().getDeclaredMethod(potentialSetterName, Map.class);
            if (Modifier.isPublic(setterMethod.getModifiers())) {
                setterMethod.invoke(deserializedHeaders, values);
                // Fixed: ClientLogger substitutes '{}' placeholders (not printf-style '%s'), so
                // the original message never rendered its arguments; also 'User' -> 'Used' typo.
                logger.verbose("Used setter {} on class {} to set header collection.", potentialSetterName,
                    deserializedHeaders.getClass().getSimpleName());
                return true;
            }
            return false;
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
            // No usable public setter; caller falls back to direct field access.
            return false;
        }
    }

    // Derives the conventional JavaBean setter name: "set" + capitalized field name.
    private String getPotentialSetterName() {
        String fieldName = declaringField.getName();
        return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
    }
}
} |
It should be fine: this field was retrieved using the `getDeclaredFields()` method on `Class`, so I would be surprised if a null or empty name were returned. | private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
} | String fieldName = declaringField.getName(); | private String getPotentialSetterName() {
String fieldName = declaringField.getName();
return "set" + fieldName.substring(0, 1).toUpperCase(Locale.ROOT) + fieldName.substring(1);
} | class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
} | class %s to set header collection.", potentialSetterName,
deserializedHeaders.getClass().getSimpleName());
return true;
}
return false;
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ignored) {
return false;
} |
Better wording: the response and value are returned by the service, not supplied by the caller, so we can't blame the caller here. :-) E.g. "Service failed to return a response or expected value." | public Response<CommunicationUserIdentifier> createUserWithResponse(Context context) {
context = context == null ? Context.NONE : context;
Response<CommunicationIdentityAccessTokenResult> response =
client.createWithResponseAsync(new CommunicationIdentityCreateRequest(), context).block();
if (response == null || response.getValue() == null) {
throw logger.logExceptionAsError(new IllegalStateException("Create user response and value cannot be null"));
}
String id = response.getValue().getIdentity().getId();
return new SimpleResponse<CommunicationUserIdentifier>(
response,
new CommunicationUserIdentifier(id));
} | throw logger.logExceptionAsError(new IllegalStateException("Create user response and value cannot be null")); | public Response<CommunicationUserIdentifier> createUserWithResponse(Context context) {
context = context == null ? Context.NONE : context;
Response<CommunicationIdentityAccessTokenResult> response =
client.createWithResponseAsync(new CommunicationIdentityCreateRequest(), context).block();
if (response == null || response.getValue() == null) {
throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value."));
}
String id = response.getValue().getIdentity().getId();
return new SimpleResponse<CommunicationUserIdentifier>(
response,
new CommunicationUserIdentifier(id));
} | class CommunicationIdentityClient {
private final CommunicationIdentityImpl client;
private final ClientLogger logger = new ClientLogger(CommunicationIdentityClient.class);
CommunicationIdentityClient(CommunicationIdentityClientImpl communicationIdentityClient) {
client = communicationIdentityClient.getCommunicationIdentity();
}
/**
* Creates a new CommunicationUserIdentifier.
*
* @return the created Communication User.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationUserIdentifier createUser() {
    // Issue the create request and wrap the returned identity id in the public identifier type.
    final CommunicationIdentityAccessTokenResult created =
        client.create(new CommunicationIdentityCreateRequest());
    final String identityId = created.getIdentity().getId();
    return new CommunicationUserIdentifier(identityId);
}
/**
* Creates a new CommunicationUserIdentifier with response.
*
* @param context A {@link Context} representing the request context.
* @return the created Communication User.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Creates a new CommunicationUserIdentifier with token.
*
* @param scopes the list of scopes for the token
* @return the result with created communication user and token
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationUserIdentifierWithTokenResult createUserWithToken(
Iterable<CommunicationTokenScope> scopes) {
Objects.requireNonNull(scopes);
final List<CommunicationTokenScope> scopesInput = new ArrayList<>();
scopes.forEach(scope -> scopesInput.add(scope));
CommunicationIdentityAccessTokenResult result = client.create(
new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput));
return userWithAccessTokenResultConverter(result);
}
/**
* Creates a new CommunicationUserIdentifier with token with response.
*
* @param scopes the list of scopes for the token
* @param context A {@link Context} representing the request context.
* @return the result with created communication user and token
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<CommunicationUserIdentifierWithTokenResult> createUserWithTokenWithResponse(
    Iterable<CommunicationTokenScope> scopes, Context context) {
    Objects.requireNonNull(scopes);
    context = context == null ? Context.NONE : context;
    final List<CommunicationTokenScope> scopesInput = new ArrayList<>();
    scopes.forEach(scopesInput::add);
    Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync(
        new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput), context).block();
    if (response == null || response.getValue() == null) {
        // The response/value come from the service, so the message must not imply a caller error.
        throw logger.logExceptionAsError(
            new IllegalStateException("Service failed to return a response or expected value."));
    }
    return new SimpleResponse<CommunicationUserIdentifierWithTokenResult>(
        response,
        userWithAccessTokenResultConverter(response.getValue()));
}
/**
* Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its
* data.
*
* @param communicationUser The user to be deleted.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Void deleteUser(CommunicationUserIdentifier communicationUser) {
Objects.requireNonNull(communicationUser);
return client.deleteAsync(communicationUser.getId()).block();
}
/**
* Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its
* data with response.
*
* @param communicationUser The user to be deleted.
* @param context A {@link Context} representing the request context.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteUserWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
Objects.requireNonNull(communicationUser);
context = context == null ? Context.NONE : context;
return client.deleteWithResponseAsync(communicationUser.getId(), context).block();
}
/**
* Revokes all the tokens created for an identifier.
*
* @param communicationUser The user to be revoked token.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Void revokeTokens(CommunicationUserIdentifier communicationUser) {
Objects.requireNonNull(communicationUser);
return client.revokeAccessTokensAsync(communicationUser.getId()).block();
}
/**
* Revokes all the tokens created for a user before a specific date.
*
* @param communicationUser The user to be revoked token.
* @param context the context of the request. Can also be null or
* Context.NONE.
* @return the response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> revokeTokensWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
Objects.requireNonNull(communicationUser);
context = context == null ? Context.NONE : context;
return client.revokeAccessTokensWithResponseAsync(communicationUser.getId(), context).block();
}
/**
* Generates a new token for an identity.
*
* @param communicationUser The user to be issued tokens.
* @param scopes The scopes that the token should have.
* @return the issued token.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AccessToken issueToken(CommunicationUserIdentifier communicationUser,
Iterable<CommunicationTokenScope> scopes) {
Objects.requireNonNull(communicationUser);
Objects.requireNonNull(scopes);
final List<CommunicationTokenScope> scopesInput = new ArrayList<>();
scopes.forEach(scope -> scopesInput.add(scope));
CommunicationIdentityAccessToken rawToken = client.issueAccessToken(
communicationUser.getId(),
new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput));
return new AccessToken(rawToken.getToken(), rawToken.getExpiresOn());
}
/**
* Generates a new token for an identity.
*
* @param communicationUser The CommunicationUser from whom to issue a token.
* @param scopes The scopes that the token should have.
* @param context the context of the request. Can also be null or
* Context.NONE.
* @return the created CommunicationUserToken.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AccessToken> issueTokenWithResponse(CommunicationUserIdentifier communicationUser,
    Iterable<CommunicationTokenScope> scopes, Context context) {
    Objects.requireNonNull(communicationUser);
    Objects.requireNonNull(scopes);
    context = context == null ? Context.NONE : context;
    final List<CommunicationTokenScope> scopesInput = new ArrayList<>();
    scopes.forEach(scopesInput::add);
    Response<CommunicationIdentityAccessToken> response = client.issueAccessTokenWithResponseAsync(
        communicationUser.getId(),
        new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput),
        context)
        .block();
    if (response == null || response.getValue() == null) {
        // The response/value come from the service, so the message must not imply a caller error.
        throw logger.logExceptionAsError(
            new IllegalStateException("Service failed to return a response or expected value."));
    }
    return new SimpleResponse<AccessToken>(
        response,
        new AccessToken(response.getValue().getToken(), response.getValue().getExpiresOn()));
}
/**
 * Repackages the generated-model access-token result into the public user + token pair.
 *
 * @param identityAccessTokenResult the generated-model result to convert.
 * @return the public result holding the user identifier and its access token.
 */
private CommunicationUserIdentifierWithTokenResult userWithAccessTokenResultConverter(
    CommunicationIdentityAccessTokenResult identityAccessTokenResult) {
    final String identityId = identityAccessTokenResult.getIdentity().getId();
    final CommunicationUserIdentifier user = new CommunicationUserIdentifier(identityId);
    final AccessToken token = new AccessToken(
        identityAccessTokenResult.getAccessToken().getToken(),
        identityAccessTokenResult.getAccessToken().getExpiresOn());
    return new CommunicationUserIdentifierWithTokenResult(user, token);
}
} | class CommunicationIdentityClient {
private final CommunicationIdentityImpl client;
private final ClientLogger logger = new ClientLogger(CommunicationIdentityClient.class);
CommunicationIdentityClient(CommunicationIdentityClientImpl communicationIdentityClient) {
client = communicationIdentityClient.getCommunicationIdentity();
}
/**
* Creates a new CommunicationUserIdentifier.
*
* @return the created Communication User.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationUserIdentifier createUser() {
CommunicationIdentityAccessTokenResult result = client.create(new CommunicationIdentityCreateRequest());
return new CommunicationUserIdentifier(result.getIdentity().getId());
}
/**
* Creates a new CommunicationUserIdentifier with response.
*
* @param context A {@link Context} representing the request context.
* @return the created Communication User.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Creates a new CommunicationUserIdentifier with token.
*
* @param scopes the list of scopes for the token
* @return the result with created communication user and token
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationUserIdentifierWithTokenResult createUserWithToken(
Iterable<CommunicationTokenScope> scopes) {
Objects.requireNonNull(scopes);
final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList());
CommunicationIdentityAccessTokenResult result = client.create(
new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput));
return userWithAccessTokenResultConverter(result);
}
/**
* Creates a new CommunicationUserIdentifier with token with response.
*
* @param scopes the list of scopes for the token
* @param context A {@link Context} representing the request context.
* @return the result with created communication user and token
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<CommunicationUserIdentifierWithTokenResult> createUserWithTokenWithResponse(
Iterable<CommunicationTokenScope> scopes, Context context) {
Objects.requireNonNull(scopes);
context = context == null ? Context.NONE : context;
final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList());
Response<CommunicationIdentityAccessTokenResult> response = client.createWithResponseAsync(
new CommunicationIdentityCreateRequest().setCreateTokenWithScopes(scopesInput), context).block();
if (response == null || response.getValue() == null) {
throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value."));
}
return new SimpleResponse<CommunicationUserIdentifierWithTokenResult>(
response,
userWithAccessTokenResultConverter(response.getValue()));
}
/**
* Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its
* data.
*
* @param communicationUser The user to be deleted.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Void deleteUser(CommunicationUserIdentifier communicationUser) {
Objects.requireNonNull(communicationUser);
return client.deleteAsync(communicationUser.getId()).block();
}
/**
* Deletes a CommunicationUserIdentifier, revokes its tokens and deletes its
* data with response.
*
* @param communicationUser The user to be deleted.
* @param context A {@link Context} representing the request context.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteUserWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
Objects.requireNonNull(communicationUser);
context = context == null ? Context.NONE : context;
return client.deleteWithResponseAsync(communicationUser.getId(), context).block();
}
/**
* Revokes all the tokens created for an identifier.
*
* @param communicationUser The user to be revoked token.
* @return the response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Void revokeTokens(CommunicationUserIdentifier communicationUser) {
Objects.requireNonNull(communicationUser);
return client.revokeAccessTokensAsync(communicationUser.getId()).block();
}
/**
* Revokes all the tokens created for a user before a specific date.
*
* @param communicationUser The user to be revoked token.
* @param context the context of the request. Can also be null or
* Context.NONE.
* @return the response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> revokeTokensWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
Objects.requireNonNull(communicationUser);
context = context == null ? Context.NONE : context;
return client.revokeAccessTokensWithResponseAsync(communicationUser.getId(), context).block();
}
/**
* Generates a new token for an identity.
*
* @param communicationUser The user to be issued tokens.
* @param scopes The scopes that the token should have.
* @return the issued token.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AccessToken issueToken(CommunicationUserIdentifier communicationUser,
Iterable<CommunicationTokenScope> scopes) {
Objects.requireNonNull(communicationUser);
Objects.requireNonNull(scopes);
final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList());
CommunicationIdentityAccessToken rawToken = client.issueAccessToken(
communicationUser.getId(),
new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput));
return new AccessToken(rawToken.getToken(), rawToken.getExpiresOn());
}
/**
* Generates a new token for an identity.
*
* @param communicationUser The CommunicationUser from whom to issue a token.
* @param scopes The scopes that the token should have.
* @param context the context of the request. Can also be null or
* Context.NONE.
* @return the created CommunicationUserToken.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AccessToken> issueTokenWithResponse(CommunicationUserIdentifier communicationUser,
Iterable<CommunicationTokenScope> scopes, Context context) {
Objects.requireNonNull(communicationUser);
Objects.requireNonNull(scopes);
context = context == null ? Context.NONE : context;
final List<CommunicationTokenScope> scopesInput = StreamSupport.stream(scopes.spliterator(), false).collect(Collectors.toList());
Response<CommunicationIdentityAccessToken> response = client.issueAccessTokenWithResponseAsync(
communicationUser.getId(),
new CommunicationIdentityAccessTokenRequest().setScopes(scopesInput),
context)
.block();
if (response == null || response.getValue() == null) {
throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value."));
}
return new SimpleResponse<AccessToken>(
response,
new AccessToken(response.getValue().getToken(), response.getValue().getExpiresOn()));
}
private CommunicationUserIdentifierWithTokenResult userWithAccessTokenResultConverter(
CommunicationIdentityAccessTokenResult identityAccessTokenResult) {
CommunicationUserIdentifier user =
new CommunicationUserIdentifier(identityAccessTokenResult.getIdentity().getId());
AccessToken token = new AccessToken(
identityAccessTokenResult.getAccessToken().getToken(),
identityAccessTokenResult.getAccessToken().getExpiresOn());
return new CommunicationUserIdentifierWithTokenResult(user, token);
}
} |
Should we remove the commented-out code? | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, context)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(resultId -> Mono.just(getAnalyzeOperationFluxPage(
resultId, null, null, finalIncludeStatistics, context)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, context)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, context))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters().setModelVersion(
action.getModelVersion() == null ? "latest" : action.getModelVersion()));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion())
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion()));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case CANCELLING:
status = LongRunningOperationStatus.fromString("CANCELLING", false);
break;
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case REJECTED:
status = LongRunningOperationStatus.fromString("REJECTED", true);
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
case FAILED:
status = LongRunningOperationStatus.fromString("FAILED", true);
break;
case PARTIALLY_COMPLETED:
status = LongRunningOperationStatus.fromString("PARTIALLY_COMPLETED", true);
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
} | |
Ok. I can remove it. | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, context)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(resultId -> Mono.just(getAnalyzeOperationFluxPage(
resultId, null, null, finalIncludeStatistics, context)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, context)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, context))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters().setModelVersion(
action.getModelVersion() == null ? "latest" : action.getModelVersion()));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion())
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion()));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case CANCELLING:
status = LongRunningOperationStatus.fromString("CANCELLING", false);
break;
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case REJECTED:
status = LongRunningOperationStatus.fromString("REJECTED", true);
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
case FAILED:
status = LongRunningOperationStatus.fromString("FAILED", true);
break;
case PARTIALLY_COMPLETED:
status = LongRunningOperationStatus.fromString("PARTIALLY_COMPLETED", true);
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
} | |
not sure i follow this line. ignore if it is a Java thing | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErrofinal List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(neousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | requestStatistics.getDocumentsCount(), requestStatistics.getErrofinal List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(neousDocumentsCount(), | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final Context finalContext;
if (context == null) {
finalContext = Context.NONE;
} else {
finalContext = context;
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(resultId -> Mono.just(getAnalyzeOperationFluxPage(
resultId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final Context finalContext;
if (context == null) {
finalContext = Context.NONE;
} else {
finalContext = context;
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters().setModelVersion(
action.getModelVersion() == null ? "latest" : action.getModelVersion()));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion())
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion()));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
/**
 * Activation step of the {@code PollerFlux}: returns the (lazy) activation
 * publisher with service failures translated to {@code HttpResponseException}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignored -> {
        try {
            final Mono<AnalyzeBatchActionsOperationDetail> withMappedErrors =
                operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            return withMappedErrors;
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the logger as an error Mono.
            return monoError(logger, ex);
        }
    };
}
/**
 * Poll step of the {@code PollerFlux}: reads the operation id captured at
 * activation, invokes the supplied status call, and converts the service job
 * state into a {@code PollResponse} via {@code processAnalyzedModelResponse}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return pollingContext -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                pollingContext.getLatestResponse();
            final String operationId = operationResultPollResponse.getValue().getOperationId();
            return pollingFunction.apply(operationId)
                .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException ex) {
            // Synchronous failures are funneled into the returned Mono, not thrown.
            return monoError(logger, ex);
        }
    };
}
/**
 * Final-result step: resolves the operation id recorded during activation and
 * delegates building the {@code PagedFlux} of results to the supplied function.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return ctx -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = ctx.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
/**
 * Final-result step for the {@code PagedIterable} variant of the LRO: looks up
 * the operation id from the latest poll response and delegates to
 * {@code fetchingFunction}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
            return fetchingFunction.apply(operationId);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
/**
 * Builds the result {@code PagedFlux}: the first page is requested with a null
 * continuation token; later pages pass the service nextLink through getPage.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        token -> getPage(token, operationId, top, skip, showStats, context));
}
/**
 * Retrieves one page of analyze results. A continuation token (the service
 * nextLink) encodes the paging state as {@code $top}/{@code $skip} query values,
 * which override the caller-supplied top/skip on every page after the first.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        top = continuationTokenMap.getOrDefault("$top", null);
        skip = continuationTokenMap.getOrDefault("$skip", null);
    }
    // Single dispatch point; the original duplicated this call in both branches.
    return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Adapts one service {@code AnalyzeJobState} response into a single-element
 * page; the service's nextLink drives continuation (parsed later by getPage).
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState analyzeJobState = response.getValue();
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        Arrays.asList(toAnalyzeTasks(analyzeJobState)),
        analyzeJobState.getNextLink(),
        // No typed deserialized headers are needed for paging.
        null);
}
/**
 * Maps the service job status onto an azure-core {@code LongRunningOperationStatus}
 * and copies the job metadata (display name, timestamps, per-action counts) onto
 * the operation detail carried by the poll response.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown states are surfaced verbatim and treated as non-terminal.
                status = LongRunningOperationStatus.fromString(
                    jobState.getStatus().toString(), true);
                break;
        }
    }
    // Fix: the original null-guarded getValue() for the switch above but then
    // dereferenced it unconditionally below, throwing NPE on a null response body.
    if (jobState != null) {
        final AnalyzeBatchActionsOperationDetail operationDetail = operationResultPollResponse.getValue();
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationDetail,
            jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationDetail,
            jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationDetail,
            jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationDetail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationDetail,
                tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationDetail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(operationDetail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationDetail,
                tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
// Creates the helper client; {@code service} is the auto-generated protocol
// layer that performs the actual REST calls.
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
    this.service = service;
}
/**
 * Starts the "analyze batch actions" long-running operation: submits the
 * documents plus the task manifest, polls the job status until terminal, and
 * exposes the final results as a {@code PagedFlux}. Synchronous failures are
 * returned as an error {@code PollerFlux}.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
    Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
    Context context) {
    try {
        inputDocumentsValidation(documents);
        // Null-tolerant defaults for options and context.
        options = getNotNullAnalyzeBatchActionsOptions(options);
        final Context finalContext = getNotNullContext(context);
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
                            new AnalyzeBatchActionsOperationDetail();
                        // The operation id is parsed out of the Operation-Location header.
                        AnalyzeBatchActionsOperationDetailPropertiesHelper
                            .setOperationId(textAnalyticsOperationResult,
                                parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return textAnalyticsOperationResult;
                    })),
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext)))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Same long-running operation as {@code beginAnalyzeBatchActions} but exposes
 * the final results as a blocking-friendly {@code PagedIterable}.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
    beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
    AnalyzeBatchActionsOptions options, Context context) {
    try {
        inputDocumentsValidation(documents);
        options = getNotNullAnalyzeBatchActionsOptions(options);
        final Context finalContext = getNotNullContext(context);
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail operationDetail =
                            new AnalyzeBatchActionsOperationDetail();
                        // The operation id is parsed out of the Operation-Location header.
                        AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
                            parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return operationDetail;
                    })),
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext))))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Converts the public {@code TextAnalyticsActions} model into the service-side
 * {@code JobManifestTasks} wire model. Absent action collections map to
 * {@code null}; {@code null} entries inside a collection are preserved. Missing
 * model versions fall back to {@code "latest"} via {@code getNotNullModelVersion}.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
    return new JobManifestTasks()
        .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final EntitiesTask entitiesTask = new EntitiesTask();
                    entitiesTask.setParameters(
                        new EntitiesTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                    return entitiesTask;
                }).collect(Collectors.toList()))
        .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final PiiTask piiTask = new PiiTask();
                    piiTask.setParameters(
                        new PiiTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion()))
                            // Domain filter is optional; null keeps the service default.
                            .setDomain(PiiTaskParametersDomain.fromString(
                                action.getDomainFilter() == null ? null
                                    : action.getDomainFilter().toString())));
                    return piiTask;
                }).collect(Collectors.toList()))
        .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
            : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
                    keyPhrasesTask.setParameters(
                        new KeyPhrasesTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                    return keyPhrasesTask;
                }).collect(Collectors.toList()));
}
/**
 * Activation step of the {@code PollerFlux}: returns the (lazy) activation
 * publisher with service failures translated to {@code HttpResponseException}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignored -> {
        try {
            final Mono<AnalyzeBatchActionsOperationDetail> withMappedErrors =
                operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            return withMappedErrors;
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the logger as an error Mono.
            return monoError(logger, ex);
        }
    };
}
/**
 * Poll step of the {@code PollerFlux}: reads the operation id captured at
 * activation, invokes the supplied status call, and converts the service job
 * state into a {@code PollResponse} via {@code processAnalyzedModelResponse}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return pollingContext -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                pollingContext.getLatestResponse();
            final String operationId = operationResultPollResponse.getValue().getOperationId();
            return pollingFunction.apply(operationId)
                .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException ex) {
            // Synchronous failures are funneled into the returned Mono, not thrown.
            return monoError(logger, ex);
        }
    };
}
/**
 * Final-result step: resolves the operation id recorded during activation and
 * delegates building the {@code PagedFlux} of results to the supplied function.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return ctx -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = ctx.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
/**
 * Final-result step for the {@code PagedIterable} variant of the LRO: looks up
 * the operation id from the latest poll response and delegates to
 * {@code fetchingFunction}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
            return fetchingFunction.apply(operationId);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
/**
 * Builds the result {@code PagedFlux}: the first page is requested with a null
 * continuation token; later pages pass the service nextLink through getPage.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        token -> getPage(token, operationId, top, skip, showStats, context));
}
/**
 * Retrieves one page of analyze results. A continuation token (the service
 * nextLink) encodes the paging state as {@code $top}/{@code $skip} query values,
 * which override the caller-supplied top/skip on every page after the first.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        top = continuationTokenMap.getOrDefault("$top", null);
        skip = continuationTokenMap.getOrDefault("$skip", null);
    }
    // Single dispatch point; the original duplicated this call in both branches.
    return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Adapts one service {@code AnalyzeJobState} response into a single-element
 * page; the service's nextLink drives continuation (parsed later by getPage).
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState analyzeJobState = response.getValue();
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        Arrays.asList(toAnalyzeTasks(analyzeJobState)),
        analyzeJobState.getNextLink(),
        // No typed deserialized headers are needed for paging.
        null);
}
/**
 * Maps the service job status onto an azure-core {@code LongRunningOperationStatus}
 * and copies the job metadata (display name, timestamps, per-action counts) onto
 * the operation detail carried by the poll response.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown states are surfaced verbatim and treated as non-terminal.
                status = LongRunningOperationStatus.fromString(
                    jobState.getStatus().toString(), true);
                break;
        }
    }
    // Fix: the original null-guarded getValue() for the switch above but then
    // dereferenced it unconditionally below, throwing NPE on a null response body.
    if (jobState != null) {
        final AnalyzeBatchActionsOperationDetail operationDetail = operationResultPollResponse.getValue();
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationDetail,
            jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationDetail,
            jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationDetail,
            jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationDetail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationDetail,
                tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationDetail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(operationDetail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationDetail,
                tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
// Falls back to the no-op context when the caller supplied none.
private Context getNotNullContext(Context context) {
    if (context == null) {
        return Context.NONE;
    }
    return context;
}
// Substitutes a default options object when the caller supplied none.
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options == null) {
        return new AnalyzeBatchActionsOptions();
    }
    return options;
}
// The service keyword "latest" selects the newest model when none is pinned.
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion == null) {
        return "latest";
    }
    return modelVersion;
}
} |
Sorry. Before I push, I made this mistake. Will correct it | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErrofinal List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(neousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | requestStatistics.getDocumentsCount(), requestStatistics.getErrofinal List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(neousDocumentsCount(), | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
// Creates the helper client; {@code service} is the auto-generated protocol
// layer that performs the actual REST calls.
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
    this.service = service;
}
/**
 * Starts the "analyze batch actions" long-running operation: submits the
 * documents plus the task manifest, polls the job status until terminal, and
 * exposes the final results as a {@code PagedFlux}. Synchronous failures are
 * returned as an error {@code PollerFlux}.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
    Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
    Context context) {
    try {
        inputDocumentsValidation(documents);
        // Null-tolerant defaults, mirroring the iterable overload.
        if (options == null) {
            options = new AnalyzeBatchActionsOptions();
        }
        final Context finalContext = context == null ? Context.NONE : context;
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
                            new AnalyzeBatchActionsOperationDetail();
                        // The operation id is parsed out of the Operation-Location header.
                        AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(textAnalyticsOperationResult,
                            parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return textAnalyticsOperationResult;
                    })),
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            // Lambda parameter renamed from "resultId" to "operationId" for
            // consistency with the rest of the class; behavior unchanged.
            fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext)))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Same long-running operation as {@code beginAnalyzeBatchActions} but exposes
 * the final results as a blocking-friendly {@code PagedIterable}.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
    beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
    AnalyzeBatchActionsOptions options, Context context) {
    try {
        inputDocumentsValidation(documents);
        // Null-tolerant defaults for options and context.
        if (options == null) {
            options = new AnalyzeBatchActionsOptions();
        }
        final Context finalContext;
        if (context == null) {
            finalContext = Context.NONE;
        } else {
            finalContext = context;
        }
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail operationDetail =
                            new AnalyzeBatchActionsOperationDetail();
                        // The operation id is parsed out of the Operation-Location header.
                        AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
                            parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return operationDetail;
                    })),
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext))))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Converts the public {@code TextAnalyticsActions} model into the service-side
 * {@code JobManifestTasks} wire model.
 *
 * Absent action collections map to {@code null}; a {@code null} entry inside a
 * collection is preserved as a {@code null} list element. A missing model version
 * falls back to the service keyword {@code "latest"}.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
    return new JobManifestTasks()
        .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final EntitiesTask entitiesTask = new EntitiesTask();
                    entitiesTask.setParameters(
                        new EntitiesTaskParameters().setModelVersion(
                            action.getModelVersion() == null ? "latest" : action.getModelVersion()));
                    return entitiesTask;
                }).collect(Collectors.toList()))
        .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final PiiTask piiTask = new PiiTask();
                    piiTask.setParameters(
                        new PiiTaskParameters()
                            .setModelVersion(action.getModelVersion() == null
                                ? "latest" : action.getModelVersion())
                            // Domain filter is optional; null keeps the service default.
                            .setDomain(PiiTaskParametersDomain.fromString(
                                action.getDomainFilter() == null ? null
                                    : action.getDomainFilter().toString())));
                    return piiTask;
                }).collect(Collectors.toList()))
        .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
            : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
                    keyPhrasesTask.setParameters(
                        new KeyPhrasesTaskParameters()
                            .setModelVersion(action.getModelVersion() == null
                                ? "latest" : action.getModelVersion()));
                    return keyPhrasesTask;
                }).collect(Collectors.toList()));
}
/**
 * Activation step of the {@code PollerFlux}: returns the (lazy) activation
 * publisher with service failures translated to {@code HttpResponseException}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignored -> {
        try {
            final Mono<AnalyzeBatchActionsOperationDetail> withMappedErrors =
                operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            return withMappedErrors;
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the logger as an error Mono.
            return monoError(logger, ex);
        }
    };
}
/**
 * Poll step of the {@code PollerFlux}: reads the operation id captured at
 * activation, invokes the supplied status call, and converts the service job
 * state into a {@code PollResponse} via {@code processAnalyzedModelResponse}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return pollingContext -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                pollingContext.getLatestResponse();
            final String operationId = operationResultPollResponse.getValue().getOperationId();
            return pollingFunction.apply(operationId)
                .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException ex) {
            // Synchronous failures are funneled into the returned Mono, not thrown.
            return monoError(logger, ex);
        }
    };
}
/**
 * Final-result step: resolves the operation id recorded during activation and
 * delegates building the {@code PagedFlux} of results to the supplied function.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return ctx -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = ctx.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
} |
Same for this `action.getModelVersion()...` | private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters().setModelVersion(
action.getModelVersion() == null ? "latest" : action.getModelVersion()));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion())
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(action.getModelVersion() == null
? "latest" : action.getModelVersion()));
return keyPhrasesTask;
}).collect(Collectors.toList()));
} | .setModelVersion(action.getModelVersion() == null | private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final Context finalContext;
if (context == null) {
finalContext = Context.NONE;
} else {
finalContext = context;
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(resultId -> Mono.just(getAnalyzeOperationFluxPage(
resultId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
if (options == null) {
options = new AnalyzeBatchActionsOptions();
}
final Context finalContext;
if (context == null) {
finalContext = Context.NONE;
} else {
finalContext = context;
}
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
// Package-private constructor: instances are wired up by the owning Text Analytics client
// with an already-configured service implementation.
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
// Starts the long-running "analyze batch actions" job and returns a poller whose final
// result is a PagedFlux of action results. Validation failures surface through the
// returned poller (PollerFlux.error) rather than being thrown synchronously.
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
// Normalize nullable inputs up front so the lambdas below capture non-null finals.
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job and capture the operation id parsed from the
// Operation-Location response header.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
// Poll: query job status until a terminal state is reached.
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// This service operation has no cancel endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Fetch: expose the paged results once the job completes.
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
// Same operation as beginAnalyzeBatchActions, but the poller's final result is a blocking
// PagedIterable instead of a PagedFlux (used by the synchronous client surface).
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
// Normalize nullable inputs before they are captured by the lambdas below.
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job; the operation id comes from Operation-Location.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// This service operation has no cancel endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Wrap the reactive page stream in a PagedIterable for synchronous consumption.
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
// Adapts an already-built activation Mono into the PollerFlux activation callback shape.
// The polling context supplied by the framework is not needed for activation.
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignoredPollingContext -> {
        try {
            // Map any service failure into an HttpResponseException where applicable.
            return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the reactive error channel.
            return monoError(logger, ex);
        }
    };
}
// Builds the PollerFlux polling callback: reads the operation id from the latest poll
// response, invokes the supplied status call, and maps the returned job state onto the
// next LRO poll response via processAnalyzedModelResponse.
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
// Translate service errors into HttpResponseException where applicable.
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
// Builds the PollerFlux "final result" callback: once polling completes, fetch the
// paged result stream keyed by the operation id recorded during activation.
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final AnalyzeBatchActionsOperationDetail latestDetail =
                pollingContext.getLatestResponse().getValue();
            return fetchingFunction.apply(latestDetail.getOperationId());
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the reactive error channel.
            return monoError(logger, ex);
        }
    };
}
// Synchronous-surface twin of fetchingOperation: resolves the final result as a
// PagedIterable instead of a PagedFlux.
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final AnalyzeBatchActionsOperationDetail latestDetail =
                pollingContext.getLatestResponse().getValue();
            return fetchingFunction.apply(latestDetail.getOperationId());
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the reactive error channel.
            return monoError(logger, ex);
        }
    };
}
// Exposes the job's result pages as a PagedFlux. The first page is requested with no
// continuation token; later pages carry the next-link token returned by the service.
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        nextLinkToken -> getPage(nextLinkToken, operationId, top, skip, showStats, context));
}
// Fetches one page of job results. The two original branches were identical except for
// the $top/$skip values, so the paging-state resolution is now factored out of the call.
//
// When a continuation token is present it encodes the next page's paging state and
// overrides the caller-supplied top/skip values.
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    Integer topValue = top;
    Integer skipValue = skip;
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        topValue = continuationTokenMap.getOrDefault("$top", null);
        skipValue = continuationTokenMap.getOrDefault("$skip", null);
    }
    return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
// Adapts the raw job-state response into a single-element page that carries the
// service's next-link for continuation.
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState jobState = response.getValue();
    final List<AnalyzeBatchActionsResult> pageItems = Arrays.asList(toAnalyzeTasks(jobState));
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        pageItems,
        jobState.getNextLink(),
        null);
}
// Converts the service job state into an AnalyzeBatchActionsResult: one result stream
// per action type (entities / PII entities / key phrases) plus optional batch statistics.
// Streams stay null when the corresponding task list is absent or empty.
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
// Entity recognition results: each task item carries its own result set and timestamp.
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
// PII entity recognition results.
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
// Key phrase extraction results.
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
// Batch statistics are only present when the caller requested them (showStats).
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
}
// Maps the service-side job status onto the LRO status and copies job metadata into the
// operation detail carried by the poll response.
//
// BUGFIX: the status switch tolerated a null job state, but the metadata copy below used
// to dereference analyzeJobStateResponse.getValue() (and getTasks()) unconditionally,
// which would throw NullPointerException for exactly the case the guard allowed.
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown service states are surfaced verbatim and treated as complete.
                status = LongRunningOperationStatus.fromString(jobState.getStatus().toString(), true);
                break;
        }
    }
    if (jobState != null) {
        final AnalyzeBatchActionsOperationDetail operationDetail = operationResultPollResponse.getValue();
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationDetail,
            jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationDetail,
            jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationDetail,
            jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationDetail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationDetail,
                tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationDetail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(operationDetail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationDetail,
                tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
// Normalizes a null context to the no-op singleton so downstream code never null-checks.
private Context getNotNullContext(Context context) {
    if (context != null) {
        return context;
    }
    return Context.NONE;
}
// Substitutes default options when the caller passed none.
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options != null) {
        return options;
    }
    return new AnalyzeBatchActionsOptions();
}
// A null model version means "use the service's latest model".
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion == null) {
        return "latest";
    }
    return modelVersion;
}
} |
Need to change the return statement here and below | public int getSize() {
return this.sizeInt;
} | return this.sizeInt; | public int getSize() {
int returnValue = this.sizeInt;
return (int) this.sizeLong;
} | class Block {
/*
* The base64 encoded block ID.
*/
@JsonProperty(value = "Name", required = true)
private String name;
/*
* The block size in bytes.
*/
@JsonProperty(value = "Size", required = true)
private long sizeLong;
/*
* The SizeInt property.
*/
@JsonProperty(value = "SizeInt", required = true)
private int sizeInt;
/**
* Get the name property: The base64 encoded block ID.
*
* @return the name value.
*/
public String getName() {
// Plain accessor; String is immutable so no defensive copy is needed.
return this.name;
}
/**
* Set the name property: The base64 encoded block ID.
*
* @param name the name value to set.
* @return the Block object itself.
*/
public Block setName(String name) {
this.name = name;
// Fluent setter: returns this instance for chaining.
return this;
}
/**
* Get the sizeLong property: The block size in bytes.
*
* @return the sizeLong value.
*/
public long getSizeLong() {
// Authoritative (non-deprecated) size accessor.
return this.sizeLong;
}
/**
* Set the sizeLong property: The block size in bytes.
*
* @param sizeLong the sizeLong value to set.
* @return the Block object itself.
*/
public Block setSizeLong(long sizeLong) {
this.sizeLong = sizeLong;
// Fluent setter: returns this instance for chaining.
return this;
}
/**
* Get the sizeInt property: The SizeInt property.
*
* @return the sizeInt value.
* @deprecated Use {@link #getSizeLong()} instead.
*/
@Deprecated
/**
* Set the sizeInt property: The SizeInt property.
*
* @param sizeInt the sizeInt value to set.
* @return the Block object itself.
* @deprecated Use {@link #setSizeLong(long)} instead.
*/
@Deprecated
public Block setSize(int sizeInt) {
// NOTE(review): only updates the deprecated sizeInt field; sizeLong is left stale.
// Confirm whether the two size fields must stay in sync (the replacement setter is
// setSizeLong) — if so, this should also assign sizeLong.
this.sizeInt = sizeInt;
return this;
}
} | class Block {
/*
* The base64 encoded block ID.
*/
@JsonProperty(value = "Name", required = true)
private String name;
/*
* The block size in bytes.
*/
@JsonProperty(value = "Size", required = true)
private long sizeLong;
/*
* The SizeInt property.
*/
@JsonProperty(value = "SizeInt", required = true)
private int sizeInt;
/**
* Get the name property: The base64 encoded block ID.
*
* @return the name value.
*/
public String getName() {
// Plain accessor; String is immutable so no defensive copy is needed.
return this.name;
}
/**
* Set the name property: The base64 encoded block ID.
*
* @param name the name value to set.
* @return the Block object itself.
*/
public Block setName(String name) {
this.name = name;
// Fluent setter: returns this instance for chaining.
return this;
}
/**
* Get the sizeLong property: The block size in bytes.
*
* @return the sizeLong value.
*/
public long getSizeLong() {
// Authoritative (non-deprecated) size accessor.
return this.sizeLong;
}
/**
* Set the sizeLong property: The block size in bytes.
*
* @param sizeLong the sizeLong value to set.
* @return the Block object itself.
*/
public Block setSizeLong(long sizeLong) {
this.sizeLong = sizeLong;
// Fluent setter: returns this instance for chaining.
return this;
}
/**
* Get the sizeInt property: The SizeInt property.
*
* @return the sizeInt value.
* @deprecated Use {@link #getSizeLong()} instead.
*/
@Deprecated
/**
* Set the sizeInt property: The SizeInt property.
*
* @param sizeInt the sizeInt value to set.
* @return the Block object itself.
* @deprecated Use {@link #setSizeLong(long)} instead.
*/
@Deprecated
// Deprecated int setter kept for back-compat. It mirrors the value into the
// authoritative long-sized field so the two stay in sync.
//
// Cleanup: the pointless "returnValue" local (always this) was removed; delegating to
// setSizeLong already returns this same instance, preserving the fluent contract.
public Block setSize(int sizeInt) {
    this.sizeInt = sizeInt;
    return setSizeLong((long) sizeInt);
}
} |
Watch out for FluxUtil consuming the request body to completion. `collectBytesInByteBufferStream` will mutate the underlying `ByteBuffer`s' `remaining()` value, which could cause `hasRemaining()` to be false when the networking layer attempts to send the request | public void setConfigurationTest() throws InterruptedException {
CountDownLatch appConfigCountDown = new CountDownLatch(1);
CountDownLatch exporterCountDown = new CountDownLatch(2);
ConfigurationClient client = getConfigurationClient(appConfigCountDown);
Tracer tracer = configureAzureMonitorExporter((context, next) -> {
Mono<String> asyncString = FluxUtil.collectBytesInByteBufferStream(context.getHttpRequest().getBody())
.map(bytes -> new String(bytes, StandardCharsets.UTF_8));
asyncString.subscribe(value -> {
if(value.contains("app-config-exporter-testing") && value.contains("\"responseCode\":\"200\"")) {
exporterCountDown.countDown();
}
if(value.contains("AppConfig.setKey")) {
exporterCountDown.countDown();
}
});
return next.process();
});
Span span = tracer.spanBuilder("app-config-exporter-testing").startSpan();
final Scope scope = span.makeCurrent();
try {
client.setConfigurationSetting("hello", "text", "World");
} finally {
span.end();
scope.close();
}
assertTrue(appConfigCountDown.await(1, TimeUnit.SECONDS));
assertTrue(exporterCountDown.await(1, TimeUnit.SECONDS));
} | Mono<String> asyncString = FluxUtil.collectBytesInByteBufferStream(context.getHttpRequest().getBody()) | public void setConfigurationTest() throws InterruptedException {
CountDownLatch appConfigCountDown = new CountDownLatch(1);
CountDownLatch exporterCountDown = new CountDownLatch(2);
ConfigurationClient client = getConfigurationClient(appConfigCountDown);
Tracer tracer = configureAzureMonitorExporter((context, next) -> {
Mono<String> asyncString = FluxUtil.collectBytesInByteBufferStream(context.getHttpRequest().getBody())
.map(bytes -> new String(bytes, StandardCharsets.UTF_8));
asyncString.subscribe(value -> {
if (value.contains("app-config-exporter-testing") && value.contains("\"responseCode\":\"200\"")) {
exporterCountDown.countDown();
}
if (value.contains("AppConfig.setKey")) {
exporterCountDown.countDown();
}
});
return next.process();
});
Span span = tracer.spanBuilder("app-config-exporter-testing").startSpan();
final Scope scope = span.makeCurrent();
try {
client.setConfigurationSetting("hello", "text", "World");
} finally {
span.end();
scope.close();
}
assertTrue(appConfigCountDown.await(1, TimeUnit.SECONDS));
assertTrue(exporterCountDown.await(1, TimeUnit.SECONDS));
} | class AppConfigurationExporterIntegrationTest extends AzureMonitorExporterTestBase {
@Test
// Builds an App Configuration client whose pipeline counts down the latch whenever a
// request context carries the App Configuration tracing namespace, proving the namespace
// was propagated through the pipeline.
// NOTE(review): "{endpoint}" looks like a sanitized/recorded-session placeholder —
// confirm it is substituted with a real connection string in live test runs.
private ConfigurationClient getConfigurationClient(CountDownLatch appConfigCountDown) {
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString("{endpoint}")
.addPolicy((context, next) -> {
Optional<Object> data = context.getData(com.azure.core.util.tracing.Tracer.AZ_TRACING_NAMESPACE_KEY);
if (data.isPresent() && data.get().equals("Microsoft.AppConfiguration")) {
appConfigCountDown.countDown();
}
return next.process();
})
// Record/playback support for the test framework's interceptor.
.addPolicy(interceptorManager.getRecordPolicy())
.buildClient();
return client;
}
} | class AppConfigurationExporterIntegrationTest extends AzureMonitorExporterTestBase {
@Test
private ConfigurationClient getConfigurationClient(CountDownLatch appConfigCountDown) {
ConfigurationClient client = new ConfigurationClientBuilder()
.connectionString(System.getenv("APP_CONFIG_CONNECTION_STRING"))
.addPolicy((context, next) -> {
Optional<Object> data = context.getData(com.azure.core.util.tracing.Tracer.AZ_TRACING_NAMESPACE_KEY);
if (data.isPresent() && data.get().equals("Microsoft.AppConfiguration")) {
appConfigCountDown.countDown();
}
return next.process();
})
.buildClient();
return client;
}
} |
Is/Should this be defaulted by the exporter when provided null? | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = removeAiSamplingPercentage(attributes);
samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage; | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
static {
    // Known relational db.system identifiers; membership is presumably checked when
    // classifying database dependency spans (see applyDatabaseQuerySpan) — confirm.
    SQL_DB_SYSTEMS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
        "db2", "derby", "mariadb", "mssql", "mysql", "oracle",
        "postgresql", "sqlite", "other_sql", "hsqldb", "h2")));
}
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
    this.client = client;
    this.instrumentationKey = instrumentationKey;
    // replace() handles the literal "-" directly; replaceAll() would compile its
    // argument as a regex on every construction, which is unnecessary here.
    String formattedInstrumentationKey = instrumentationKey.replace("-", "");
    // Telemetry item names are namespaced per resource, e.g.
    // "Microsoft.ApplicationInsights.<ikey-without-dashes>.<ItemType>".
    this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
try {
// Convert every span into one or more telemetry items, then ship the whole batch.
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
// NOTE(review): fire-and-forget subscribe() — send failures after this point are not
// reflected in the returned result code; confirm this is intentional.
client.export(telemetryItems).subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
// Broad catch is deliberate: an exporter must never propagate into the SDK pipeline.
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
// Nothing is buffered locally; each export call dispatches immediately.
return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
// No resources to release; the underlying async client is managed by the caller.
return CompletableResultCode.ofSuccess();
}
// Routes a single span to request or remote-dependency telemetry based on its kind and
// instrumentation library (e.g. "io.opentelemetry.auto.jms-1.1" -> stdComponent "jms").
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
// Root JMS CLIENT spans are skipped entirely — presumably JMS "receive" polling noise;
// confirm against the JMS instrumentation's span model.
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
// Root INTERNAL spans are treated as requests; child spans become dependencies.
// "EventHubs.message" is reported as an out-of-process dependency, everything else in-proc.
if (!span.getParentSpanContext().isValid()) {
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
// Any future/unknown span kind is an explicit failure rather than silent data loss.
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
// Cheaply extracts "TypeName: message" from the first line of a stack trace, without a
// full stack-frame parse; the complete stack text is preserved verbatim.
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    String firstLine = errorStack.split("\n")[0];
    int separator = firstLine.indexOf(": ");
    if (separator == -1) {
        // No message portion — the whole line is the exception type.
        details.setTypeName(firstLine);
    } else {
        details.setTypeName(firstLine.substring(0, separator));
        details.setMessage(firstLine.substring(separator + 2));
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Converts a CLIENT/PRODUCER/INTERNAL span into a RemoteDependency telemetry item and
 * appends it (plus any span-event telemetry) to {@code telemetryItems}.
 *
 * @param stdComponent instrumentation component name ("jms", "jdbc", ...) or null.
 * @param inProc true for in-process (INTERNAL) dependencies.
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
// NOTE(review): the next statement is a no-op — its result is discarded.
// Looks like leftover code; consider removing.
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
// Dependency type/target are derived from semantic attributes, with special
// cases for Event Hubs, Kafka and JMS spans.
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
// Sampling percentage is optional; it is null when the upstream instrumentation
// did not record "ai.sampling.percentage".
Double samplingPercentage = removeAiSamplingPercentage(attributes);
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
if (samplingPercentage != null) {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
}
telemetryItems.add(telemetryItem);
// NOTE(review): samplingPercentage may be null here; exportEvents must guard
// before calling floatValue().
exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates database-dependency fields (type, statement, target) from the span's DB
 * semantic attributes. All relational systems collapse to the single "SQL" type.
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
    String dependencyType = SQL_DB_SYSTEMS.contains(dbSystem) ? "SQL" : dbSystem;
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));

    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection details available; fall back to the database type as target.
        rd.setTarget(dependencyType);
        return;
    }
    String databaseName = attributes.get(SemanticAttributes.DB_NAME);
    String target = databaseName == null
        ? connectionString
        : connectionString + " | " + databaseName;
    // JDBC targets carry an explicit scheme prefix.
    rd.setTarget("jdbc".equals(component) ? "jdbc:" + target : target);
}
/**
 * Populates HTTP-dependency fields (type, result code, target, name) from the span's
 * HTTP semantic attributes. Malformed URLs are logged and otherwise ignored.
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    if (url != null) {
        try {
            URI uriObject = new URI(url);
            String target = createTarget(uriObject);
            remoteDependencyData.setTarget(target);
            String path = uriObject.getPath();
            // Dependency name is "<METHOD> <path>", with "/" standing in for an empty path.
            if (CoreUtils.isNullOrEmpty(path)) {
                remoteDependencyData.setName(method + " /");
            } else {
                remoteDependencyData.setName(method + " " + path);
            }
        } catch (URISyntaxException e) {
            // FIX: pass the exception so the stack trace is preserved in the log,
            // consistent with the catch block in export(Collection).
            logger.error(e.getMessage(), e);
        }
    }
}
/**
 * Converts span events into Event telemetry items. Exception events are not emitted as
 * events; instead the first one carrying a stack trace is reported via trackException.
 *
 * @param samplingPercentage sample rate to stamp on emitted items; may be null when the
 *     upstream "ai.sampling.percentage" attribute was absent.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Only the first exception event per span is tracked, and only when it
            // carries a stack trace.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            // BUG FIX: samplingPercentage is null when the caller found no
            // "ai.sampling.percentage" attribute; calling floatValue()
            // unconditionally threw a NullPointerException.
            if (samplingPercentage != null) {
                telemetryItem.setSampleRate(samplingPercentage.floatValue());
            }
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Emits an Exception telemetry item for the given stack trace, correlated to the span's
 * operation via the supplied operation and parent ids.
 *
 * @param samplingPercentage sample rate to stamp on the item; may be null when the
 *     upstream "ai.sampling.percentage" attribute was absent.
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    TelemetryExceptionData exceptionData = new TelemetryExceptionData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Exception");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    exceptionData.setProperties(new HashMap<>());
    exceptionData.setVersion(2);
    monitorBase.setBaseType("ExceptionData");
    monitorBase.setBaseData(exceptionData);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
    // BUG FIX: guard against a null sampling percentage (attribute absent upstream);
    // the unconditional floatValue() call threw a NullPointerException.
    if (samplingPercentage != null) {
        telemetryItem.setSampleRate(samplingPercentage.floatValue());
    }
    exceptionData.setExceptions(minimalParse(errorStack));
    telemetryItems.add(telemetryItem);
}
/**
 * Renders a duration in the "dd.hh:mm:ss.fff"-style shape expected by the ingestion
 * service.
 */
private static String getFormattedDuration(Duration duration) {
    // BUG FIX: Duration.toHours()/toMinutes()/getSeconds()/toMillis() return *total*
    // units (e.g. 25 hours, 90000 seconds), so the previous concatenation produced a
    // malformed value such as "1.25:1500:90000.90000000". Reduce each field modulo
    // the next larger unit to get proper day/hour/minute/second/millisecond parts.
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60 + ":"
        + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC, truncated to
 * millisecond precision.
 */
private static String getFormattedTime(long epochNanos) {
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return Instant.ofEpochMilli(epochMillis)
        .atOffset(ZoneOffset.UTC)
        .format(DateTimeFormatter.ISO_DATE_TIME);
}
/**
 * Serializes span links as a compact JSON array of {operation_Id, id} objects under the
 * well-known "_MS.links" property. Does nothing when there are no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        json.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    json.append("]");
    properties.put("_MS.links", json.toString());
}
/**
 * Reads a string-typed attribute by name, or null when absent or not a string.
 * NOTE(review): despite the name, nothing is removed — Attributes is read-only here.
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    return value instanceof String ? (String) value : null;
}
/**
 * Reads a double-typed attribute by name, or null when absent or not a double.
 */
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
    // BUG FIX: the lookup previously used AttributeKey.stringKey(...), whose typed
    // lookup cannot return a Double, so the instanceof check below could never pass
    // and this method always returned null (silently dropping the sampling
    // percentage). Look the value up under a double-typed key instead.
    Object attributeValue = attributes.get(AttributeKey.doubleKey(attributeName));
    if (attributeValue == null) {
        return null;
    } else if (attributeValue instanceof Double) {
        return (Double) attributeValue;
    } else {
        return null;
    }
}
/**
 * Builds a "host[:port]" target string from a URI, omitting the port when it is absent
 * (-1) or a default HTTP/HTTPS port (80/443).
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    String target = uriObject.getHost();
    if (port != -1 && port != 80 && port != 443) {
        target = target + ":" + port;
    }
    return target;
}
/**
 * Renders an attribute value as a string: array-typed values are joined with ", ",
 * scalar values use String.valueOf, and unknown types yield null.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            // Unrecognized attribute type; caller skips null renderings.
            return null;
    }
}
/**
 * Joins list elements with ", "; returns the empty string for a null or empty list.
 */
private static <T> String join(List<T> values) {
    if (CoreUtils.isNullOrEmpty(values)) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    String separator = "";
    for (T value : values) {
        joined.append(separator).append(value);
        separator = ", ";
    }
    return joined.toString();
}
// Extracts the sampling percentage recorded by upstream instrumentation under the
// "ai.sampling.percentage" attribute; returns null when the attribute is absent.
private static Double removeAiSamplingPercentage(Attributes attributes) {
return removeAttributeDouble(attributes, "ai.sampling.percentage");
}
/**
 * Copies every attribute with a string rendering (see getStringValue) into the given
 * telemetry properties map, keyed by the attribute key's string form.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((attributeKey, attributeValue) -> {
        String rendered = getStringValue(attributeKey, attributeValue);
        if (rendered == null) {
            // Unsupported attribute type; skip it.
            return;
        }
        properties.put(attributeKey.toString(), rendered);
    });
}
} | class AzureMonitorExporter implements SpanExporter {
// Matches auto-instrumentation library names such as "io.opentelemetry.auto.jms-1.1"
// and captures the bare component name ("jms") in group 1.
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
// Database systems reported under the generic "SQL" dependency type; populated below.
private static final Set<String> SQL_DB_SYSTEMS;
static {
    // Relational database systems that are collapsed to the single dependency
    // type "SQL" when mapping database spans.
    Set<String> relationalDbSystems = new HashSet<>();
    relationalDbSystems.add("db2");
    relationalDbSystems.add("derby");
    relationalDbSystems.add("h2");
    relationalDbSystems.add("hsqldb");
    relationalDbSystems.add("mariadb");
    relationalDbSystems.add("mssql");
    relationalDbSystems.add("mysql");
    relationalDbSystems.add("oracle");
    relationalDbSystems.add("other_sql");
    relationalDbSystems.add("postgresql");
    relationalDbSystems.add("sqlite");
    SQL_DB_SYSTEMS = Collections.unmodifiableSet(relationalDbSystems);
}
// Async client used to transmit telemetry to the Azure Monitor ingestion endpoint.
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
// Application Insights resource that receives the telemetry.
private final String instrumentationKey;
// "Microsoft.ApplicationInsights.<ikey-without-dashes>." — prefix for item names.
private final String telemetryItemNamePrefix;
/**
 * Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
 * Application Insights resource identified by the instrumentation key.
 *
 * @param client The client used to send data to Azure Monitor.
 * @param instrumentationKey The instrumentation key of Application Insights resource.
 */
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
    this.client = client;
    this.instrumentationKey = instrumentationKey;
    // String.replace does a literal replacement; replaceAll would needlessly compile
    // the argument as a regular expression.
    String formattedInstrumentationKey = instrumentationKey.replace("-", "");
    this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
// Fire-and-forget; DISABLE_TRACING_KEY in the subscriber context keeps the
// exporter's own HTTP call from being traced (which would generate telemetry
// about exporting telemetry).
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
// NOTE(review): success is reported before the async export completes; a
// transport failure after subscribe() is not reflected in the result code.
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode flush() {
// Spans are handed to the async client as they arrive in export(); nothing is
// buffered in this class, so there is nothing to flush.
return CompletableResultCode.ofSuccess();
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode shutdown() {
// No resources are owned by this exporter itself, so shutdown always succeeds.
return CompletableResultCode.ofSuccess();
}
// Maps a single span onto the matching Application Insights telemetry shape:
// SERVER/CONSUMER spans (and parentless INTERNAL spans) become requests,
// everything else becomes a remote dependency.
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
// Stand-alone (parentless) JMS client spans are intentionally dropped.
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
if (!span.getParentSpanContext().isValid()) {
// A root INTERNAL span is treated as an incoming request.
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
// Event Hubs message spans are out-of-proc dependencies even with a parent.
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
/**
 * Builds a minimal exception-details record from a raw stack-trace string: the first
 * line is parsed as "FullyQualifiedType: message" and the whole stack is attached.
 *
 * @param errorStack the rendered stack trace; must not be null.
 * @return a single-element list with the parsed details.
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    // Take just the first line without materializing the whole split array
    // (stack traces can be large), and tolerate Windows "\r\n" line endings.
    int newline = errorStack.indexOf('\n');
    String line = newline == -1 ? errorStack : errorStack.substring(0, newline);
    if (line.endsWith("\r")) {
        line = line.substring(0, line.length() - 1);
    }
    int index = line.indexOf(": ");
    if (index != -1) {
        details.setTypeName(line.substring(0, index));
        details.setMessage(line.substring(index + 2));
    } else {
        // No message portion present; the whole line is the exception type.
        details.setTypeName(line);
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Converts a CLIENT/PRODUCER/INTERNAL span into a RemoteDependency telemetry item and
 * appends it (plus any span-event telemetry) to {@code telemetryItems}.
 *
 * @param stdComponent instrumentation component name ("jms", "jdbc", ...) or null.
 * @param inProc true for in-process (INTERNAL) dependencies.
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
// NOTE(review): the next statement is a no-op — its result is discarded.
// Looks like leftover code; consider removing.
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
// Dependency type/target are derived from semantic attributes, with special
// cases for Event Hubs, Kafka and JMS spans.
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
// Sampling is fixed at 100% in this variant of the exporter.
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates database-dependency fields (type, statement, target) from the span's DB
 * semantic attributes. Relational systems collapse to the single "SQL" type.
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
String component) {
String type = attributes.get(SemanticAttributes.DB_SYSTEM);
if (SQL_DB_SYSTEMS.contains(type)) {
type = "SQL";
}
rd.setType(type);
rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
if (dbUrl == null) {
// No connection details available; fall back to the database type as target.
rd.setTarget(type);
} else {
String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
if (dbInstance != null) {
dbUrl += " | " + dbInstance;
}
// JDBC targets carry an explicit scheme prefix.
if ("jdbc".equals(component)) {
rd.setTarget("jdbc:" + dbUrl);
} else {
rd.setTarget(dbUrl);
}
}
}
/**
 * Populates HTTP-dependency fields (type, result code, target, name) from the span's
 * HTTP semantic attributes. Malformed URLs are logged and otherwise ignored.
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    if (url != null) {
        try {
            URI uriObject = new URI(url);
            String target = createTarget(uriObject);
            remoteDependencyData.setTarget(target);
            String path = uriObject.getPath();
            // Dependency name is "<METHOD> <path>", with "/" standing in for an empty path.
            if (CoreUtils.isNullOrEmpty(path)) {
                remoteDependencyData.setName(method + " /");
            } else {
                remoteDependencyData.setName(method + " " + path);
            }
        } catch (URISyntaxException e) {
            // FIX: pass the exception so the stack trace is preserved in the log,
            // consistent with the catch block in export(Collection).
            logger.error(e.getMessage(), e);
        }
    }
}
/**
 * Converts span events into Event telemetry items. Exception events are not emitted as
 * events; instead the first one carrying a stack trace is reported via trackException.
 *
 * @param samplingPercentage sample rate to stamp on emitted items. In this variant all
 *     callers pass 100.0; a null value would throw below — TODO confirm callers.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
boolean foundException = false;
for (EventData event : span.getEvents()) {
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryEventData eventData = new TelemetryEventData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Event");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
eventData.setProperties(new HashMap<>());
eventData.setVersion(2);
monitorBase.setBaseType("EventData");
monitorBase.setBaseData(eventData);
eventData.setName(event.getName());
String operationId = span.getTraceId();
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
addExtraAttributes(eventData.getProperties(), event.getAttributes());
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
// Only the first exception event per span is tracked, and only when it
// carries a stack trace.
if (!foundException) {
Object stacktrace = event.getAttributes()
.get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null) {
trackException(stacktrace.toString(), span, operationId,
span.getSpanId(), samplingPercentage, telemetryItems);
}
}
foundException = true;
} else {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
}
}
}
/**
 * Emits an Exception telemetry item for the given stack trace, correlated to the span's
 * operation via the supplied operation and parent ids.
 *
 * @param samplingPercentage sample rate to stamp on the item; a null value would throw
 *     at the floatValue() call below — TODO confirm all callers pass non-null.
 */
private void trackException(String errorStack, SpanData span, String operationId,
String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryExceptionData exceptionData = new TelemetryExceptionData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Exception");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
exceptionData.setProperties(new HashMap<>());
exceptionData.setVersion(2);
monitorBase.setBaseType("ExceptionData");
monitorBase.setBaseData(exceptionData);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
telemetryItem.setSampleRate(samplingPercentage.floatValue());
exceptionData.setExceptions(minimalParse(errorStack));
telemetryItems.add(telemetryItem);
}
/**
 * Renders a duration in the "dd.hh:mm:ss.fff"-style shape expected by the ingestion
 * service.
 */
private static String getFormattedDuration(Duration duration) {
    // BUG FIX: Duration.toHours()/toMinutes()/getSeconds()/toMillis() return *total*
    // units (e.g. 25 hours, 90000 seconds), so the previous concatenation produced a
    // malformed value such as "1.25:1500:90000.90000000". Reduce each field modulo
    // the next larger unit to get proper day/hour/minute/second/millisecond parts.
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60 + ":"
        + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
// Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC,
// truncated to millisecond precision.
private static String getFormattedTime(long epochNanos) {
return Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos))
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ISO_DATE_TIME);
}
// Serializes span links as a compact JSON array of {operation_Id, id} objects under
// the well-known "_MS.links" property. Does nothing when there are no links.
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
if (links.isEmpty()) {
return;
}
StringBuilder sb = new StringBuilder();
sb.append("[");
boolean first = true;
for (LinkData link : links) {
if (!first) {
sb.append(",");
}
sb.append("{\"operation_Id\":\"");
sb.append(link.getSpanContext().getTraceIdAsHexString());
sb.append("\",\"id\":\"");
sb.append(link.getSpanContext().getSpanIdAsHexString());
sb.append("\"}");
first = false;
}
sb.append("]");
properties.put("_MS.links", sb.toString());
}
// Reads a string-typed attribute by name, or null when absent or not a string.
// NOTE(review): despite the name, nothing is removed — Attributes is read-only here.
private static String removeAttributeString(Attributes attributes, String attributeName) {
Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
if (attributeValue == null) {
return null;
} else if (attributeValue instanceof String) {
return attributeValue.toString();
} else {
return null;
}
}
// Builds a "host[:port]" target string from a URI, omitting the port when it is
// absent (-1) or a default HTTP/HTTPS port (80/443).
private static String createTarget(URI uriObject) {
String target = uriObject.getHost();
if (uriObject.getPort() != 80 && uriObject.getPort() != 443 && uriObject.getPort() != -1) {
target += ":" + uriObject.getPort();
}
return target;
}
// Renders an attribute value as a string: scalars via String.valueOf, arrays joined
// with ", "; unknown attribute types yield null and are skipped by the caller.
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
switch (attributeKey.getType()) {
case STRING:
case BOOLEAN:
case LONG:
case DOUBLE:
return String.valueOf(value);
case STRING_ARRAY:
case BOOLEAN_ARRAY:
case LONG_ARRAY:
case DOUBLE_ARRAY:
return join((List<?>) value);
default:
return null;
}
}
// Joins list elements with ", "; returns the empty string for a null or empty list.
private static <T> String join(List<T> values) {
StringBuilder sb = new StringBuilder();
if (CoreUtils.isNullOrEmpty(values)) {
return sb.toString();
}
for (int i = 0; i < values.size() - 1; i++) {
sb.append(values.get(i));
sb.append(", ");
}
sb.append(values.get(values.size() - 1));
return sb.toString();
}
// Copies every attribute with a string rendering (see getStringValue) into the
// telemetry properties map, keyed by the attribute key's string form.
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
attributes.forEach((key, value) -> {
String val = getStringValue(key, value);
if (val != null) {
properties.put(key.toString(), val);
}
});
}
} |
Are we doing recordings for these tests? | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
HttpClient httpClient;
if (getTestMode() == TestMode.RECORD || getTestMode() == TestMode.LIVE) {
httpClient = HttpClient.createDefault();
} else {
httpClient = interceptorManager.getPlaybackClient();
}
HttpPipeline httpPipeline = new HttpPipelineBuilder()
.httpClient(httpClient)
.policies(interceptorManager.getRecordPolicy(), validator).build();
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString("{connection-string}")
.pipeline(httpPipeline)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | httpClient = interceptorManager.getPlaybackClient(); | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString(System.getenv("AZURE_MONITOR_CONNECTION_STRING"))
.addPolicy(validator)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} |
If recording, should these be looking for env vars? | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
HttpClient httpClient;
if (getTestMode() == TestMode.RECORD || getTestMode() == TestMode.LIVE) {
httpClient = HttpClient.createDefault();
} else {
httpClient = interceptorManager.getPlaybackClient();
}
HttpPipeline httpPipeline = new HttpPipelineBuilder()
.httpClient(httpClient)
.policies(interceptorManager.getRecordPolicy(), validator).build();
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString("{connection-string}")
.pipeline(httpPipeline)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | .connectionString("{connection-string}") | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString(System.getenv("AZURE_MONITOR_CONNECTION_STRING"))
.addPolicy(validator)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} |
Currently, this is set up as an integration test only. So, recordings are not included. | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
HttpClient httpClient;
if (getTestMode() == TestMode.RECORD || getTestMode() == TestMode.LIVE) {
httpClient = HttpClient.createDefault();
} else {
httpClient = interceptorManager.getPlaybackClient();
}
HttpPipeline httpPipeline = new HttpPipelineBuilder()
.httpClient(httpClient)
.policies(interceptorManager.getRecordPolicy(), validator).build();
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString("{connection-string}")
.pipeline(httpPipeline)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | httpClient = interceptorManager.getPlaybackClient(); | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString(System.getenv("AZURE_MONITOR_CONNECTION_STRING"))
.addPolicy(validator)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} |
Live test setup will be added as a follow-up. | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
HttpClient httpClient;
if (getTestMode() == TestMode.RECORD || getTestMode() == TestMode.LIVE) {
httpClient = HttpClient.createDefault();
} else {
httpClient = interceptorManager.getPlaybackClient();
}
HttpPipeline httpPipeline = new HttpPipelineBuilder()
.httpClient(httpClient)
.policies(interceptorManager.getRecordPolicy(), validator).build();
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString("{connection-string}")
.pipeline(httpPipeline)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | .connectionString("{connection-string}") | Tracer configureAzureMonitorExporter(HttpPipelinePolicy validator) {
AzureMonitorExporter exporter = new AzureMonitorExporterBuilder()
.connectionString(System.getenv("AZURE_MONITOR_CONNECTION_STRING"))
.addPolicy(validator)
.buildExporter();
OpenTelemetrySdk.getGlobalTracerManagement().addSpanProcessor(SimpleSpanProcessor.create(exporter));
return OpenTelemetrySdk.get().getTracer("Sample");
} | class AzureMonitorExporterTestBase extends TestBase {
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests");
}
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} | class AzureMonitorExporterTestBase extends TestBase {
// Skip under PLAYBACK: these tests require a live or record run (no session records yet).
@BeforeEach
public void setupTest(TestInfo testInfo) {
Assumptions.assumeFalse(getTestMode() == TestMode.PLAYBACK, "Skipping playback tests")
}
// No per-test cleanup is required for this suite.
@Override
@AfterEach
public void teardownTest(TestInfo testInfo) {
}
} |
Sampling will be completely removed as it won't be supported in this exporter. | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = removeAiSamplingPercentage(attributes);
samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage; | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | class AzureMonitorExporter implements SpanExporter {
// Matches auto-instrumentation library names such as "io.opentelemetry.auto.jms-1.1";
// group 1 captures the bare component name (e.g. "jms").
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
// db.system values that are all reported under the single "SQL" dependency type.
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
// Async client that delivers the mapped telemetry items to Azure Monitor.
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
// Envelope-name prefix: "Microsoft.ApplicationInsights.<ikey-without-dashes>.".
private final String telemetryItemNamePrefix;
/**
 * Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
 * Application Insights resource identified by the instrumentation key.
 *
 * @param client The client used to send data to Azure Monitor.
 * @param instrumentationKey The instrumentation key of Application Insights resource.
 */
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
// Dashes are stripped from the instrumentation key to build the envelope name prefix.
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
    List<TelemetryItem> telemetryItems = new ArrayList<>();
    try {
        for (SpanData spanData : spans) {
            logger.verbose("exporting span: {}", spanData);
            export(spanData, telemetryItems);
        }
        // Fire-and-forget: the async client delivers the batch in the background.
        client.export(telemetryItems).subscribe();
        return CompletableResultCode.ofSuccess();
    } catch (Throwable t) {
        // An exporter must never propagate exceptions back into application code.
        logger.error(t.getMessage(), t);
        return CompletableResultCode.ofFailure();
    }
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode flush() {
// export() hands telemetry straight to the async client, so nothing is buffered locally.
return CompletableResultCode.ofSuccess();
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode shutdown() {
// This exporter does not own the client's lifecycle; there is nothing to release here.
return CompletableResultCode.ofSuccess();
}
/**
 * Routes a span to the request or remote-dependency mapper based on its kind.
 *
 * @param span the span to map.
 * @param telemetryItems output list that mapped telemetry is appended to.
 * @throws UnsupportedOperationException for span kinds this exporter does not handle.
 */
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
    String libraryName = span.getInstrumentationLibraryInfo().getName();
    Matcher componentMatcher = COMPONENT_PATTERN.matcher(libraryName);
    String stdComponent = componentMatcher.matches() ? componentMatcher.group(1) : null;
    Span.Kind kind = span.getKind();
    // JMS client spans with no valid parent are intentionally not exported.
    if (kind == Span.Kind.CLIENT && "jms".equals(stdComponent) && !span.getParentSpanContext().isValid()) {
        return;
    }
    switch (kind) {
        case INTERNAL:
            if (!span.getParentSpanContext().isValid()) {
                // A root internal span is treated as an incoming request.
                exportRequest(stdComponent, span, telemetryItems);
            } else if (span.getName().equals("EventHubs.message")) {
                exportRemoteDependency(stdComponent, span, false, telemetryItems);
            } else {
                exportRemoteDependency(stdComponent, span, true, telemetryItems);
            }
            break;
        case CLIENT:
        case PRODUCER:
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
            break;
        case SERVER:
        case CONSUMER:
            exportRequest(stdComponent, span, telemetryItems);
            break;
        default:
            throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
    }
}
/**
 * Builds exception details from a raw stack-trace string without fully parsing it: the
 * first line supplies the exception type (and message, when present) and the complete
 * string is attached as the stack.
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    String firstLine = errorStack.split("\n")[0];
    int separatorIndex = firstLine.indexOf(": ");
    if (separatorIndex == -1) {
        // No message portion; the whole line is the type name.
        details.setTypeName(firstLine);
    } else {
        details.setTypeName(firstLine.substring(0, separatorIndex));
        details.setMessage(firstLine.substring(separatorIndex + 2));
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Maps a span onto a {@code RemoteDependencyData} telemetry item and appends it, along
 * with telemetry for the span's events, to {@code telemetryItems}.
 *
 * @param stdComponent auto-instrumentation component name parsed from the instrumentation
 *     library name, or {@code null} for manually instrumented spans.
 * @param span the span to map.
 * @param inProc whether the span represents an in-process (internal child) operation.
 * @param telemetryItems output list that mapped telemetry is appended to.
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
    List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
    MonitorBase monitorBase = new MonitorBase();
    // Envelope boilerplate.
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    remoteDependencyData.setProperties(new HashMap<>());
    remoteDependencyData.setVersion(2);
    monitorBase.setBaseType("RemoteDependencyData");
    monitorBase.setBaseData(remoteDependencyData);
    addLinks(remoteDependencyData.getProperties(), span.getLinks());
    remoteDependencyData.setName(span.getName());
    Attributes attributes = span.getAttributes();
    if (inProc) {
        remoteDependencyData.setType("InProc");
    } else {
        // Derive the dependency type/target from the most specific signal available.
        if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
            applyHttpRequestSpan(attributes, remoteDependencyData);
        } else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
            applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
        } else if (span.getName().equals("EventHubs.send")) {
            remoteDependencyData.setType("Microsoft.EventHub");
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            remoteDependencyData.setTarget(peerAddress + "/" + destination);
        } else if (span.getName().equals("EventHubs.message")) {
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            if (peerAddress != null) {
                remoteDependencyData.setTarget(peerAddress + "/" + destination);
            }
            remoteDependencyData.setType("Microsoft.EventHub");
        } else if ("kafka-clients".equals(stdComponent)) {
            remoteDependencyData.setType("Kafka");
            remoteDependencyData.setTarget(span.getName());
        } else if ("jms".equals(stdComponent)) {
            remoteDependencyData.setType("JMS");
            remoteDependencyData.setTarget(span.getName());
        }
    }
    remoteDependencyData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    String parentSpanId = span.getParentSpanId();
    if (span.getParentSpanContext().isValid()) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
    }
    telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
    remoteDependencyData
        .setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
    remoteDependencyData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        remoteDependencyData.getProperties().put("statusDescription", description);
    }
    Double samplingPercentage = removeAiSamplingPercentage(attributes);
    // Default to 100% (not sampled) when the attribute is absent. Previously a null value
    // was forwarded to exportEvents/trackException where samplingPercentage.floatValue()
    // throws a NullPointerException; this also matches exportRequest's defaulting.
    samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
    if (stdComponent == null) {
        addExtraAttributes(remoteDependencyData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Fills the dependency type, data (statement), and target for a database-client span.
 * All SQL-flavored db.system values collapse into the single "SQL" type.
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
    String dependencyType = SQL_DB_SYSTEMS.contains(dbSystem) ? "SQL" : dbSystem;
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection information; fall back to the dependency type as the target.
        rd.setTarget(dependencyType);
        return;
    }
    String dbName = attributes.get(SemanticAttributes.DB_NAME);
    String target = dbName == null ? connectionString : connectionString + " | " + dbName;
    if ("jdbc".equals(component)) {
        target = "jdbc:" + target;
    }
    rd.setTarget(target);
}
/**
 * Fills the dependency fields for an outgoing HTTP span: result code from the status
 * attribute, target from the URL's host/port, and name as "METHOD path".
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    if (url == null) {
        return;
    }
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    try {
        URI parsedUrl = new URI(url);
        remoteDependencyData.setTarget(createTarget(parsedUrl));
        String path = parsedUrl.getPath();
        // An empty path is rendered as "/" so the name is always "METHOD <path>".
        remoteDependencyData.setName(
            CoreUtils.isNullOrEmpty(path) ? method + " /" : method + " " + path);
    } catch (URISyntaxException e) {
        logger.error(e.getMessage());
    }
}
/**
 * Converts each span event into Event telemetry; the first event carrying exception
 * attributes is instead reported as Exception telemetry (later exception events on the
 * same span are dropped).
 *
 * @param span the span whose events are exported.
 * @param samplingPercentage sample rate stamped onto each item.
 *     NOTE(review): a null value would NPE at setSampleRate below — confirm all callers
 *     pass a non-null percentage.
 * @param telemetryItems output list that telemetry is appended to.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
boolean foundException = false;
for (EventData event : span.getEvents()) {
// Envelope boilerplate for the Event item.
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryEventData eventData = new TelemetryEventData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Event");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
eventData.setProperties(new HashMap<>());
eventData.setVersion(2);
monitorBase.setBaseType("EventData");
monitorBase.setBaseData(eventData);
eventData.setName(event.getName());
// Correlate the event with the span's trace and parent span.
String operationId = span.getTraceId();
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
telemetryItem.setTime(getFormattedTime(event.getEpochNanos()))
addExtraAttributes(eventData.getProperties(), event.getAttributes());
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
// Only the first exception event is tracked; a stack trace is required to report it.
if (!foundException) {
Object stacktrace = event.getAttributes()
.get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null) {
trackException(stacktrace.toString(), span, operationId,
span.getSpanId(), samplingPercentage, telemetryItems);
}
}
foundException = true;
} else {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
}
}
}
/**
 * Appends an Exception telemetry item built from a raw stack-trace string.
 *
 * @param errorStack the exception stack trace as recorded on the span event.
 * @param span the span the exception belongs to (used for the timestamp).
 * @param operationId the trace id used to correlate the exception.
 * @param id the span id recorded as the operation parent id.
 * @param samplingPercentage sample rate stamped onto the item.
 *     NOTE(review): a null value would NPE at setSampleRate below — confirm callers.
 * @param telemetryItems output list that the item is appended to.
 */
private void trackException(String errorStack, SpanData span, String operationId,
String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
// Envelope boilerplate for the Exception item.
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryExceptionData exceptionData = new TelemetryExceptionData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Exception");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
exceptionData.setProperties(new HashMap<>());
exceptionData.setVersion(2);
monitorBase.setBaseType("ExceptionData");
monitorBase.setBaseData(exceptionData);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
// The exception is stamped with the span's end time.
telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
telemetryItem.setSampleRate(samplingPercentage.floatValue());
exceptionData.setExceptions(minimalParse(errorStack));
telemetryItems.add(telemetryItem);
}
/**
 * Formats a duration as the "days.hours:minutes:seconds.millis" breakdown expected by
 * Application Insights. Each component must be reduced to its own unit: the previous
 * implementation concatenated the *total* hours/minutes/seconds/millis (Duration's
 * toHours()/toMinutes()/etc. return totals), producing values such as
 * "0.1:90:5400.5400000" for a 90-minute duration.
 */
private static String getFormattedDuration(Duration duration) {
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60 + ":"
        + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC, truncated to
 * millisecond precision.
 */
private static String getFormattedTime(long epochNanos) {
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return Instant.ofEpochMilli(epochMillis)
        .atOffset(ZoneOffset.UTC)
        .format(DateTimeFormatter.ISO_DATE_TIME);
}
/**
 * Serializes the span links as a compact JSON array under the "_MS.links" property.
 * Nothing is written when the span has no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        json.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    properties.put("_MS.links", json.append("]").toString());
}
/**
 * Reads a string-typed attribute by name; returns null when absent or not a string.
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
    // instanceof is false for null, covering both the "absent" and "wrong type" cases.
    return attributeValue instanceof String ? (String) attributeValue : null;
}
/**
 * Reads a double-typed attribute by name; returns null when absent.
 *
 * <p>Fix: the previous implementation looked the attribute up with
 * {@code AttributeKey.stringKey(attributeName)}. OpenTelemetry attribute lookup is keyed
 * by name <em>and</em> type, so a double-valued attribute never matched a string key and
 * the subsequent {@code instanceof Double} branch was unreachable — the method always
 * returned null.
 */
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
    return attributes.get(AttributeKey.doubleKey(attributeName));
}
/**
 * Derives the dependency target ("host" or "host:port") from a URI, omitting the port
 * when it is a default HTTP/HTTPS port (80/443) or unset (-1).
 */
private static String createTarget(URI uriObject) {
    String target = uriObject.getHost();
    int port = uriObject.getPort();
    boolean defaultOrMissingPort = port == 80 || port == 443 || port == -1;
    if (!defaultOrMissingPort) {
        target += ":" + port;
    }
    return target;
}
/**
 * Renders an attribute value as a string: scalars via String.valueOf, arrays as a
 * comma-separated list; returns null for unrecognized attribute types.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            // Unsupported attribute type; the caller drops the attribute.
            return null;
    }
}
/**
 * Joins the string form of each element with ", "; returns an empty string for a null or
 * empty list.
 */
private static <T> String join(List<T> values) {
    if (CoreUtils.isNullOrEmpty(values)) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(values.get(i));
    }
    return joined.toString();
}
// Reads the "ai.sampling.percentage" attribute recorded by upstream Application Insights
// instrumentation; returns null when the span carries no sampling information.
private static Double removeAiSamplingPercentage(Attributes attributes) {
return removeAttributeDouble(attributes, "ai.sampling.percentage");
}
/**
 * Copies every attribute whose value has a supported string rendering into the
 * properties map, keyed by the attribute name.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String stringValue = getStringValue(key, value);
        if (stringValue == null) {
            // Unsupported attribute type; skip it.
            return;
        }
        properties.put(key.toString(), stringValue);
    });
}
} | class AzureMonitorExporter implements SpanExporter {
// Matches auto-instrumentation library names such as "io.opentelemetry.auto.jms-1.1";
// group 1 captures the bare component name (e.g. "jms").
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
// db.system values that are all reported under the single "SQL" dependency type.
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
// Async client that delivers the mapped telemetry items to Azure Monitor.
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
// Envelope-name prefix: "Microsoft.ApplicationInsights.<ikey-without-dashes>.".
private final String telemetryItemNamePrefix;
/**
 * Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
 * Application Insights resource identified by the instrumentation key.
 *
 * @param client The client used to send data to Azure Monitor.
 * @param instrumentationKey The instrumentation key of Application Insights resource.
 */
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
// Dashes are stripped from the instrumentation key to build the envelope name prefix.
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
// Disable tracing on the export call itself — presumably so the exporter's own HTTP
// request does not generate new spans that would be exported again; confirm intent.
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
// An exporter must never propagate exceptions back into application code.
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode flush() {
// export() hands telemetry straight to the async client, so nothing is buffered locally.
return CompletableResultCode.ofSuccess();
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode shutdown() {
// This exporter does not own the client's lifecycle; there is nothing to release here.
return CompletableResultCode.ofSuccess();
}
/**
 * Routes a span to the request or remote-dependency mapper based on its kind.
 *
 * @param span the span to map.
 * @param telemetryItems output list that mapped telemetry is appended to.
 * @throws UnsupportedOperationException for span kinds this exporter does not handle.
 */
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
    String libraryName = span.getInstrumentationLibraryInfo().getName();
    Matcher componentMatcher = COMPONENT_PATTERN.matcher(libraryName);
    String stdComponent = componentMatcher.matches() ? componentMatcher.group(1) : null;
    Span.Kind kind = span.getKind();
    // JMS client spans with no valid parent are intentionally not exported.
    if (kind == Span.Kind.CLIENT && "jms".equals(stdComponent) && !span.getParentSpanContext().isValid()) {
        return;
    }
    switch (kind) {
        case INTERNAL:
            if (!span.getParentSpanContext().isValid()) {
                // A root internal span is treated as an incoming request.
                exportRequest(stdComponent, span, telemetryItems);
            } else if (span.getName().equals("EventHubs.message")) {
                exportRemoteDependency(stdComponent, span, false, telemetryItems);
            } else {
                exportRemoteDependency(stdComponent, span, true, telemetryItems);
            }
            break;
        case CLIENT:
        case PRODUCER:
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
            break;
        case SERVER:
        case CONSUMER:
            exportRequest(stdComponent, span, telemetryItems);
            break;
        default:
            throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
    }
}
/**
 * Builds exception details from a raw stack-trace string without fully parsing it: the
 * first line supplies the exception type (and message, when present) and the complete
 * string is attached as the stack.
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    String firstLine = errorStack.split("\n")[0];
    int separatorIndex = firstLine.indexOf(": ");
    if (separatorIndex == -1) {
        // No message portion; the whole line is the type name.
        details.setTypeName(firstLine);
    } else {
        details.setTypeName(firstLine.substring(0, separatorIndex));
        details.setMessage(firstLine.substring(separatorIndex + 2));
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Maps a span onto a {@code RemoteDependencyData} telemetry item and appends it, along
 * with telemetry for the span's events, to {@code telemetryItems}.
 *
 * @param stdComponent auto-instrumentation component name parsed from the instrumentation
 *     library name, or {@code null} for manually instrumented spans.
 * @param span the span to map.
 * @param inProc whether the span represents an in-process (internal child) operation.
 * @param telemetryItems output list that mapped telemetry is appended to.
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
// Envelope boilerplate.
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
// NOTE(review): the next statement has no effect — its result is discarded.
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
// Derive the dependency type/target from the most specific signal available.
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
// Sampling is not supported by this exporter; every item is reported at 100%.
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Fills the dependency type, data (statement), and target for a database-client span.
 * All SQL-flavored db.system values collapse into the single "SQL" type.
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
    String dependencyType = SQL_DB_SYSTEMS.contains(dbSystem) ? "SQL" : dbSystem;
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection information; fall back to the dependency type as the target.
        rd.setTarget(dependencyType);
        return;
    }
    String dbName = attributes.get(SemanticAttributes.DB_NAME);
    String target = dbName == null ? connectionString : connectionString + " | " + dbName;
    if ("jdbc".equals(component)) {
        target = "jdbc:" + target;
    }
    rd.setTarget(target);
}
/**
 * Fills the dependency fields for an outgoing HTTP span: result code from the status
 * attribute, target from the URL's host/port, and name as "METHOD path".
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    if (url == null) {
        return;
    }
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    try {
        URI parsedUrl = new URI(url);
        remoteDependencyData.setTarget(createTarget(parsedUrl));
        String path = parsedUrl.getPath();
        // An empty path is rendered as "/" so the name is always "METHOD <path>".
        remoteDependencyData.setName(
            CoreUtils.isNullOrEmpty(path) ? method + " /" : method + " " + path);
    } catch (URISyntaxException e) {
        logger.error(e.getMessage());
    }
}
/**
 * Converts each span event into Event telemetry; the first event carrying exception
 * attributes is instead reported as Exception telemetry (later exception events on the
 * same span are dropped).
 *
 * @param span the span whose events are exported.
 * @param samplingPercentage sample rate stamped onto each item.
 *     NOTE(review): a null value would NPE at setSampleRate below — confirm all callers
 *     pass a non-null percentage.
 * @param telemetryItems output list that telemetry is appended to.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
boolean foundException = false;
for (EventData event : span.getEvents()) {
// Envelope boilerplate for the Event item.
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryEventData eventData = new TelemetryEventData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Event");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
eventData.setProperties(new HashMap<>());
eventData.setVersion(2);
monitorBase.setBaseType("EventData");
monitorBase.setBaseData(eventData);
eventData.setName(event.getName());
// Correlate the event with the span's trace and parent span.
String operationId = span.getTraceId();
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
addExtraAttributes(eventData.getProperties(), event.getAttributes());
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
// Only the first exception event is tracked; a stack trace is required to report it.
if (!foundException) {
Object stacktrace = event.getAttributes()
.get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null) {
trackException(stacktrace.toString(), span, operationId,
span.getSpanId(), samplingPercentage, telemetryItems);
}
}
foundException = true;
} else {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
}
}
}
/**
 * Appends an Exception telemetry item built from a raw stack-trace string.
 *
 * @param errorStack the exception stack trace as recorded on the span event.
 * @param span the span the exception belongs to (used for the timestamp).
 * @param operationId the trace id used to correlate the exception.
 * @param id the span id recorded as the operation parent id.
 * @param samplingPercentage sample rate stamped onto the item.
 *     NOTE(review): a null value would NPE at setSampleRate below — confirm callers.
 * @param telemetryItems output list that the item is appended to.
 */
private void trackException(String errorStack, SpanData span, String operationId,
String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
// Envelope boilerplate for the Exception item.
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryExceptionData exceptionData = new TelemetryExceptionData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Exception");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
exceptionData.setProperties(new HashMap<>());
exceptionData.setVersion(2);
monitorBase.setBaseType("ExceptionData");
monitorBase.setBaseData(exceptionData);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
// The exception is stamped with the span's end time.
telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
telemetryItem.setSampleRate(samplingPercentage.floatValue());
exceptionData.setExceptions(minimalParse(errorStack));
telemetryItems.add(telemetryItem);
}
/**
 * Formats a duration in the Application Insights "days.hours:minutes:seconds.milliseconds"
 * shape, e.g. 90 seconds formats as {@code "0.0:1:30.0"}.
 *
 * <p>FIX: {@link Duration#toHours()}, {@link Duration#toMinutes()}, {@link Duration#getSeconds()}
 * and {@link Duration#toMillis()} return the TOTAL duration expressed in that unit, not the
 * per-field remainder, so the previous implementation rendered any duration of a minute or more
 * as nonsense (90 seconds became "0.0:1:90.90000"). Each field is now reduced modulo the size of
 * the next-larger unit.
 *
 * @param duration the span duration; assumed non-negative — TODO confirm callers never pass a
 *                 negative duration (end before start).
 * @return the formatted duration string.
 */
private static String getFormattedDuration(Duration duration) {
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60
        + ":" + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
/**
 * Converts an epoch-nanosecond timestamp to an ISO-8601 date-time string in UTC.
 * Sub-millisecond precision is discarded by the conversion through milliseconds.
 */
private static String getFormattedTime(long epochNanos) {
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return DateTimeFormatter.ISO_DATE_TIME
        .format(Instant.ofEpochMilli(epochMillis).atOffset(ZoneOffset.UTC));
}
/**
 * Serializes the span's links as a compact JSON array of {operation_Id, id} pairs and stores
 * it under the "_MS.links" property; does nothing when there are no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    for (int i = 0; i < links.size(); i++) {
        if (i > 0) {
            json.append(",");
        }
        LinkData link = links.get(i);
        json.append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
    }
    json.append("]");
    properties.put("_MS.links", json.toString());
}
/**
 * Reads the named attribute as a String; returns null when it is absent or not a String.
 * (Despite the name it only reads — the attribute is not removed from the collection.)
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    // instanceof is false for null, covering both "missing" and "wrong type" in one check.
    return (value instanceof String) ? (String) value : null;
}
/**
 * Builds the dependency target from a URI: host, plus ":port" only when the port is explicit
 * and not a default HTTP/HTTPS port.
 * NOTE(review): assumes the URI has a host component — getHost() may be null for opaque URIs;
 * verify against callers.
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    boolean defaultOrUnsetPort = port == 80 || port == 443 || port == -1;
    String target = uriObject.getHost();
    if (!defaultOrUnsetPort) {
        target += ":" + port;
    }
    return target;
}
/**
 * Renders an attribute value as a String: scalars via String.valueOf, array types as a
 * comma-separated list; returns null for unrecognized attribute types.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            return null;
    }
}
/**
 * Joins the string forms of the given values with ", "; returns the empty string for a null or
 * empty list.
 */
private static <T> String join(List<T> values) {
    if (CoreUtils.isNullOrEmpty(values)) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(values.get(i));
    }
    return joined.toString();
}
/**
 * Copies every attribute that can be rendered as a String into the telemetry properties map,
 * keyed by the attribute key's string form; attributes of unsupported types are skipped.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((attributeKey, attributeValue) -> {
        String stringValue = getStringValue(attributeKey, attributeValue);
        if (stringValue == null) {
            return; // unsupported type — drop it rather than emit "null"
        }
        properties.put(attributeKey.toString(), stringValue);
    });
}
} |
now that using async client, ideally would not return constant success value here | public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
} | return CompletableResultCode.ofSuccess(); | public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
if (!span.getParentSpanContext().isValid()) {
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
TelemetryExceptionDetails details = new TelemetryExceptionDetails();
String line = errorStack.split("\n")[0];
int index = line.indexOf(": ");
if (index != -1) {
details.setTypeName(line.substring(0, index));
details.setMessage(line.substring(index + 2));
} else {
details.setTypeName(line);
}
details.setStack(errorStack);
return Arrays.asList(details);
}
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
if (samplingPercentage != null) {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
}
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
String component) {
String type = attributes.get(SemanticAttributes.DB_SYSTEM);
if (SQL_DB_SYSTEMS.contains(type)) {
type = "SQL";
}
rd.setType(type);
rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
if (dbUrl == null) {
rd.setTarget(type);
} else {
String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
if (dbInstance != null) {
dbUrl += " | " + dbInstance;
}
if ("jdbc".equals(component)) {
rd.setTarget("jdbc:" + dbUrl);
} else {
rd.setTarget(dbUrl);
}
}
}
private void applyHttpRequestSpan(Attributes attributes,
RemoteDependencyData remoteDependencyData) {
remoteDependencyData.setType("Http (tracked component)");
String method = attributes.get(SemanticAttributes.HTTP_METHOD);
String url = attributes.get(SemanticAttributes.HTTP_URL);
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode != null) {
remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
}
if (url != null) {
try {
URI uriObject = new URI(url);
String target = createTarget(uriObject);
remoteDependencyData.setTarget(target);
String path = uriObject.getPath();
if (CoreUtils.isNullOrEmpty(path)) {
remoteDependencyData.setName(method + " /");
} else {
remoteDependencyData.setName(method + " " + path);
}
} catch (URISyntaxException e) {
logger.error(e.getMessage());
}
}
}
private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = removeAiSamplingPercentage(attributes);
samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
boolean foundException = false;
for (EventData event : span.getEvents()) {
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryEventData eventData = new TelemetryEventData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Event");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
eventData.setProperties(new HashMap<>());
eventData.setVersion(2);
monitorBase.setBaseType("EventData");
monitorBase.setBaseData(eventData);
eventData.setName(event.getName());
String operationId = span.getTraceId();
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
addExtraAttributes(eventData.getProperties(), event.getAttributes());
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
if (!foundException) {
Object stacktrace = event.getAttributes()
.get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null) {
trackException(stacktrace.toString(), span, operationId,
span.getSpanId(), samplingPercentage, telemetryItems);
}
}
foundException = true;
} else {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
}
}
}
private void trackException(String errorStack, SpanData span, String operationId,
String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryExceptionData exceptionData = new TelemetryExceptionData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Exception");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
exceptionData.setProperties(new HashMap<>());
exceptionData.setVersion(2);
monitorBase.setBaseType("ExceptionData");
monitorBase.setBaseData(exceptionData);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
telemetryItem.setSampleRate(samplingPercentage.floatValue());
exceptionData.setExceptions(minimalParse(errorStack));
telemetryItems.add(telemetryItem);
}
private static String getFormattedDuration(Duration duration) {
return duration.toDays() + "." + duration.toHours() + ":" + duration.toMinutes() + ":" + duration.getSeconds()
+ "." + duration.toMillis();
}
private static String getFormattedTime(long epochNanos) {
return Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos))
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ISO_DATE_TIME);
}
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
if (links.isEmpty()) {
return;
}
StringBuilder sb = new StringBuilder();
sb.append("[");
boolean first = true;
for (LinkData link : links) {
if (!first) {
sb.append(",");
}
sb.append("{\"operation_Id\":\"");
sb.append(link.getSpanContext().getTraceIdAsHexString());
sb.append("\",\"id\":\"");
sb.append(link.getSpanContext().getSpanIdAsHexString());
sb.append("\"}");
first = false;
}
sb.append("]");
properties.put("_MS.links", sb.toString());
}
private static String removeAttributeString(Attributes attributes, String attributeName) {
Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
if (attributeValue == null) {
return null;
} else if (attributeValue instanceof String) {
return attributeValue.toString();
} else {
return null;
}
}
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
if (attributeValue == null) {
return null;
} else if (attributeValue instanceof Double) {
return (Double) attributeValue;
} else {
return null;
}
}
private static String createTarget(URI uriObject) {
String target = uriObject.getHost();
if (uriObject.getPort() != 80 && uriObject.getPort() != 443 && uriObject.getPort() != -1) {
target += ":" + uriObject.getPort();
}
return target;
}
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
switch (attributeKey.getType()) {
case STRING:
case BOOLEAN:
case LONG:
case DOUBLE:
return String.valueOf(value);
case STRING_ARRAY:
case BOOLEAN_ARRAY:
case LONG_ARRAY:
case DOUBLE_ARRAY:
return join((List<?>) value);
default:
return null;
}
}
private static <T> String join(List<T> values) {
StringBuilder sb = new StringBuilder();
if (CoreUtils.isNullOrEmpty(values)) {
return sb.toString();
}
for (int i = 0; i < values.size() - 1; i++) {
sb.append(values.get(i));
sb.append(", ");
}
sb.append(values.get(values.size() - 1));
return sb.toString();
}
private static Double removeAiSamplingPercentage(Attributes attributes) {
return removeAttributeDouble(attributes, "ai.sampling.percentage");
}
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
attributes.forEach((key, value) -> {
String val = getStringValue(key, value);
if (val != null) {
properties.put(key.toString(), val);
}
});
}
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
if (!span.getParentSpanContext().isValid()) {
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
TelemetryExceptionDetails details = new TelemetryExceptionDetails();
String line = errorStack.split("\n")[0];
int index = line.indexOf(": ");
if (index != -1) {
details.setTypeName(line.substring(0, index));
details.setMessage(line.substring(index + 2));
} else {
details.setTypeName(line);
}
details.setStack(errorStack);
return Arrays.asList(details);
}
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
String component) {
String type = attributes.get(SemanticAttributes.DB_SYSTEM);
if (SQL_DB_SYSTEMS.contains(type)) {
type = "SQL";
}
rd.setType(type);
rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
if (dbUrl == null) {
rd.setTarget(type);
} else {
String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
if (dbInstance != null) {
dbUrl += " | " + dbInstance;
}
if ("jdbc".equals(component)) {
rd.setTarget("jdbc:" + dbUrl);
} else {
rd.setTarget(dbUrl);
}
}
}
private void applyHttpRequestSpan(Attributes attributes,
RemoteDependencyData remoteDependencyData) {
remoteDependencyData.setType("Http (tracked component)");
String method = attributes.get(SemanticAttributes.HTTP_METHOD);
String url = attributes.get(SemanticAttributes.HTTP_URL);
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode != null) {
remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
}
if (url != null) {
try {
URI uriObject = new URI(url);
String target = createTarget(uriObject);
remoteDependencyData.setTarget(target);
String path = uriObject.getPath();
if (CoreUtils.isNullOrEmpty(path)) {
remoteDependencyData.setName(method + " /");
} else {
remoteDependencyData.setName(method + " " + path);
}
} catch (URISyntaxException e) {
logger.error(e.getMessage());
}
}
}
private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Converts each OpenTelemetry span event into an Application Insights "Event" telemetry item,
 * except exception events: only the FIRST exception event on the span is converted (into
 * exception telemetry via {@link #trackException}); later exception events are dropped.
 *
 * @param span the span whose events are exported
 * @param samplingPercentage sample rate stamped on every emitted item (must be non-null)
 * @param telemetryItems output list that emitted items are appended to
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        // Build the standard telemetry envelope for this event.
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        // Correlate the event with its owning trace/span.
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Exception event: emit exception telemetry instead of an Event item,
            // and only for the first exception observed on this span.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            telemetryItem.setSampleRate(samplingPercentage.floatValue());
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Emits one Application Insights exception telemetry item parsed from a raw stack-trace string.
 *
 * @param errorStack raw exception stack trace (first line is parsed for type/message)
 * @param span the span the exception belongs to (supplies the end timestamp)
 * @param operationId trace id used for correlation
 * @param id span id recorded as the operation parent
 * @param samplingPercentage sample rate stamped on the item (must be non-null)
 * @param telemetryItems output list the item is appended to
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    // Assemble the payload first, then wrap it in the telemetry envelope.
    TelemetryExceptionData payload = new TelemetryExceptionData();
    payload.setProperties(new HashMap<>());
    payload.setVersion(2);
    payload.setExceptions(minimalParse(errorStack));

    MonitorBase base = new MonitorBase();
    base.setBaseType("ExceptionData");
    base.setBaseData(payload);

    TelemetryItem item = new TelemetryItem();
    item.setTags(new HashMap<>());
    item.setName(telemetryItemNamePrefix + "Exception");
    item.setVersion(1);
    item.setInstrumentationKey(instrumentationKey);
    item.setData(base);
    // Correlate with the owning trace and span.
    item.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    item.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    item.setTime(getFormattedTime(span.getEndEpochNanos()));
    item.setSampleRate(samplingPercentage.floatValue());

    telemetryItems.add(item);
}
/**
 * Formats a duration as "days.hours:minutes:seconds.millis".
 *
 * Bug fix: {@link Duration#toHours()}, {@link Duration#toMinutes()}, {@link Duration#getSeconds()}
 * and {@link Duration#toMillis()} all return the TOTAL duration in that unit, so the previous
 * implementation repeated the whole duration in every field (e.g. 25h rendered as
 * "1.25:1500:90000.90000000"). Each field must be reduced modulo its parent unit.
 *
 * @param duration the elapsed span duration (non-negative)
 * @return the formatted duration string
 */
private static String getFormattedDuration(Duration duration) {
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60 + ":"
        + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC,
 * truncated to millisecond precision.
 *
 * @param epochNanos nanoseconds since the Unix epoch
 * @return ISO-8601 formatted timestamp (UTC offset rendered as "Z")
 */
private static String getFormattedTime(long epochNanos) {
    // Drop sub-millisecond precision before formatting.
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return DateTimeFormatter.ISO_DATE_TIME
        .format(Instant.ofEpochMilli(epochMillis).atOffset(ZoneOffset.UTC));
}
/**
 * Serializes span links as a compact JSON array of {operation_Id, id} objects and stores it
 * under the "_MS.links" property. Does nothing when there are no links.
 *
 * @param properties telemetry property map to write into
 * @param links span links to serialize
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        json.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    json.append("]");
    properties.put("_MS.links", json.toString());
}
/**
 * Reads the named attribute through a String-typed key.
 *
 * @param attributes attribute bag to read from
 * @param attributeName name of the attribute
 * @return the attribute's value when it is a String, otherwise {@code null}
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    // Missing or non-String values collapse to null.
    return value instanceof String ? value.toString() : null;
}
/**
 * Builds a dependency target of the form "host" or "host:port", omitting the port when it is
 * unset (-1) or a default HTTP(S) port (80/443).
 *
 * @param uriObject parsed request URI
 * @return target string for dependency telemetry
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    boolean defaultOrUnsetPort = port == 80 || port == 443 || port == -1;
    if (defaultOrUnsetPort) {
        return uriObject.getHost();
    }
    return uriObject.getHost() + ":" + port;
}
/**
 * Converts an attribute value to its string form: array-typed attributes are joined with
 * ", ", scalar attributes go through {@code String.valueOf}, anything else yields null.
 *
 * @param attributeKey key describing the attribute's declared type
 * @param value the attribute's value
 * @return string rendering of the value, or {@code null} for unknown types
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        // Array attributes are rendered as a comma-separated list.
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        // Scalar attributes use their natural string form.
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            return null;
    }
}
/**
 * Joins list elements with ", ". A null or empty list yields the empty string.
 *
 * @param values elements to join (may be null or empty)
 * @return comma-separated rendering of the elements
 */
private static <T> String join(List<T> values) {
    if (CoreUtils.isNullOrEmpty(values)) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    String separator = "";
    for (T value : values) {
        joined.append(separator).append(value);
        separator = ", ";
    }
    return joined.toString();
}
/**
 * Copies every attribute with a representable string value into the telemetry property map,
 * keyed by the attribute's name. Attributes of unknown type are skipped.
 *
 * @param properties telemetry property map to populate
 * @param attributes source attribute bag
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String stringValue = getStringValue(key, value);
        if (stringValue == null) {
            return; // unrepresentable type — skip
        }
        properties.put(key.toString(), stringValue);
    });
}
} |
we don't have this line in the javaagent exporter, did you find it was needed here? | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
    // Converts a server/consumer (or root) span into an Application Insights "Request"
    // telemetry item, then exports the span's events. The sample rate comes from the
    // "ai.sampling.percentage" attribute, defaulting to 100.
    TelemetryItem telemetryItem = new TelemetryItem();
    RequestData requestData = new RequestData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Request");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    requestData.setProperties(new HashMap<>());
    requestData.setVersion(2);
    monitorBase.setBaseType("RequestData");
    monitorBase.setBaseData(requestData);
    Attributes attributes = span.getAttributes();
    // Messaging consumers report the destination (span name) as the request source.
    if ("kafka-clients".equals(stdComponent)) {
        requestData.setSource(span.getName());
    } else if ("jms".equals(stdComponent)) {
        requestData.setSource(span.getName());
    }
    addLinks(requestData.getProperties(), span.getLinks());
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    // Default when the span carries no http.status_code.
    // NOTE(review): the javaagent exporter does not set this "200" default — confirm it is wanted.
    requestData.setResponseCode("200");
    if (httpStatusCode != null) {
        requestData.setResponseCode(Long.toString(httpStatusCode));
    }
    String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
    if (httpUrl != null) {
        requestData.setUrl(httpUrl);
    }
    String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
    String name = span.getName();
    // Path-only span names become "METHOD /path".
    if (httpMethod != null && name.startsWith("/")) {
        name = httpMethod + " " + name;
    }
    requestData.setName(name);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
    if (span.getName().equals("EventHubs.process")) {
        String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
        String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
        requestData.setSource(peerAddress + "/" + destination);
    }
    requestData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    // Legacy Application Insights SDK correlation ids travel in tracestate.
    String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
    if (aiLegacyParentId != null) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
        String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
        if (aiLegacyOperationId != null) {
            telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
        }
    } else {
        String parentSpanId = span.getParentSpanId();
        if (span.getParentSpanContext().isValid()) {
            telemetryItem.getTags()
                .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
        }
    }
    long startEpochNanos = span.getStartEpochNanos();
    telemetryItem.setTime(getFormattedTime(startEpochNanos));
    Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
    requestData.setDuration(getFormattedDuration(duration));
    requestData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        requestData.getProperties().put("statusDescription", description);
    }
    Double samplingPercentage = removeAiSamplingPercentage(attributes);
    samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
    // Extra attributes are only forwarded for spans not produced by a known instrumentation.
    if (stdComponent == null) {
        addExtraAttributes(requestData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
} | requestData.setResponseCode("200"); | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
    // Converts a server/consumer (or root) span into an Application Insights "Request"
    // telemetry item, then exports the span's events. Sample rate is a fixed 100 here.
    TelemetryItem telemetryItem = new TelemetryItem();
    RequestData requestData = new RequestData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Request");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    requestData.setProperties(new HashMap<>());
    requestData.setVersion(2);
    monitorBase.setBaseType("RequestData");
    monitorBase.setBaseData(requestData);
    Attributes attributes = span.getAttributes();
    // Messaging consumers report the destination (span name) as the request source.
    if ("kafka-clients".equals(stdComponent)) {
        requestData.setSource(span.getName());
    } else if ("jms".equals(stdComponent)) {
        requestData.setSource(span.getName());
    }
    addLinks(requestData.getProperties(), span.getLinks());
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    // Default when the span carries no http.status_code.
    // NOTE(review): the javaagent exporter does not set this "200" default — confirm it is wanted.
    requestData.setResponseCode("200");
    if (httpStatusCode != null) {
        requestData.setResponseCode(Long.toString(httpStatusCode));
    }
    String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
    if (httpUrl != null) {
        requestData.setUrl(httpUrl);
    }
    String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
    String name = span.getName();
    // Path-only span names become "METHOD /path".
    if (httpMethod != null && name.startsWith("/")) {
        name = httpMethod + " " + name;
    }
    requestData.setName(name);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
    if (span.getName().equals("EventHubs.process")) {
        String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
        String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
        requestData.setSource(peerAddress + "/" + destination);
    }
    requestData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    // Legacy Application Insights SDK correlation ids travel in tracestate.
    String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
    if (aiLegacyParentId != null) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
        String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
        if (aiLegacyOperationId != null) {
            telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
        }
    } else {
        String parentSpanId = span.getParentSpanId();
        if (span.getParentSpanContext().isValid()) {
            telemetryItem.getTags()
                .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
        }
    }
    long startEpochNanos = span.getStartEpochNanos();
    telemetryItem.setTime(getFormattedTime(startEpochNanos));
    Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
    requestData.setDuration(getFormattedDuration(duration));
    requestData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        requestData.getProperties().put("statusDescription", description);
    }
    Double samplingPercentage = 100.0;
    // Extra attributes are only forwarded for spans not produced by a known instrumentation.
    if (stdComponent == null) {
        addExtraAttributes(requestData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
} | class AzureMonitorExporter implements SpanExporter {
// Extracts the instrumentation component name (group 1) from auto-instrumentation
// library names such as "io.opentelemetry.auto.jms-1.1".
private static final Pattern COMPONENT_PATTERN = Pattern
    .compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
// db.system values that are mapped to the single "SQL" dependency type.
private static final Set<String> SQL_DB_SYSTEMS;
static {
    Set<String> dbSystems = new HashSet<>();
    dbSystems.add("db2");
    dbSystems.add("derby");
    dbSystems.add("mariadb");
    dbSystems.add("mssql");
    dbSystems.add("mysql");
    dbSystems.add("oracle");
    dbSystems.add("postgresql");
    dbSystems.add("sqlite");
    dbSystems.add("other_sql");
    dbSystems.add("hsqldb");
    dbSystems.add("h2");
    SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
// Client used to deliver telemetry batches to Azure Monitor.
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
// Application Insights instrumentation key stamped on every telemetry item.
private final String instrumentationKey;
// Envelope-name prefix: "Microsoft.ApplicationInsights.<ikey-without-dashes>.".
private final String telemetryItemNamePrefix;
/**
 * Creates an exporter that forwards completed spans to Azure Monitor through the given client,
 * stamping each telemetry item with the supplied instrumentation key.
 *
 * @param client The client used to send data to Azure Monitor.
 * @param instrumentationKey The instrumentation key of Application Insights resource.
 */
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
    this.client = client;
    this.instrumentationKey = instrumentationKey;
    // Envelope names embed the instrumentation key with its dashes stripped.
    String ikeyWithoutDashes = instrumentationKey.replace("-", "");
    this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + ikeyWithoutDashes + ".";
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
    try {
        // Convert every span (and its events) into telemetry items, then send one batch.
        List<TelemetryItem> telemetryItems = new ArrayList<>();
        for (SpanData span : spans) {
            logger.verbose("exporting span: {}", span);
            export(span, telemetryItems);
        }
        // Fire-and-forget: disable tracing of the export call itself to avoid
        // recursively generating spans for the exporter's own HTTP traffic.
        client.export(telemetryItems)
            .subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
            .subscribe();
        return CompletableResultCode.ofSuccess();
    } catch (Throwable t) {
        // Exporter boundary: never let a conversion failure propagate into the SDK.
        logger.error(t.getMessage(), t);
        return CompletableResultCode.ofFailure();
    }
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode flush() {
    // Export is fire-and-forget; there is no buffered state to flush.
    return CompletableResultCode.ofSuccess();
}
/**
 * {@inheritDoc}
 */
@Override
public CompletableResultCode shutdown() {
    // No resources owned by the exporter itself; the client's lifecycle is managed externally.
    return CompletableResultCode.ofSuccess();
}
/**
 * Routes a single span to the appropriate telemetry shape: requests for server/consumer
 * (and root internal) spans, remote dependencies for client/producer (and child internal)
 * spans. Parentless JMS client spans are dropped as producer-side duplicates.
 *
 * @param span the finished span to convert
 * @param telemetryItems output list that converted items are appended to
 */
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
    Span.Kind kind = span.getKind();
    String libraryName = span.getInstrumentationLibraryInfo().getName();
    Matcher matcher = COMPONENT_PATTERN.matcher(libraryName);
    String stdComponent = matcher.matches() ? matcher.group(1) : null;
    boolean orphanJmsClientSpan =
        "jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT;
    if (orphanJmsClientSpan) {
        return;
    }
    switch (kind) {
        case INTERNAL:
            if (!span.getParentSpanContext().isValid()) {
                // Root internal spans are surfaced as requests.
                exportRequest(stdComponent, span, telemetryItems);
            } else if (span.getName().equals("EventHubs.message")) {
                exportRemoteDependency(stdComponent, span, false, telemetryItems);
            } else {
                exportRemoteDependency(stdComponent, span, true, telemetryItems);
            }
            return;
        case CLIENT:
        case PRODUCER:
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
            return;
        case SERVER:
        case CONSUMER:
            exportRequest(stdComponent, span, telemetryItems);
            return;
        default:
            throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
    }
}
/**
 * Minimal stack-trace parse: reads only the first line, splitting it into exception type and
 * message on the first ": ". The full text is kept as the stack.
 *
 * @param errorStack raw exception stack trace
 * @return a single-element list with the parsed exception details
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    // First line format: "<fully.qualified.Type>: <message>".
    String firstLine = errorStack.split("\n")[0];
    int separatorIndex = firstLine.indexOf(": ");
    if (separatorIndex == -1) {
        // No message — the whole line is the type name.
        details.setTypeName(firstLine);
    } else {
        details.setTypeName(firstLine.substring(0, separatorIndex));
        details.setMessage(firstLine.substring(separatorIndex + 2));
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Converts a client/producer (or in-process) span into an Application Insights
 * "RemoteDependency" telemetry item, then exports the span's events.
 *
 * Fixes: removed the no-op statement {@code span.getInstrumentationLibraryInfo().getName();}
 * (its result was discarded) and the dead null-check on {@code samplingPercentage}, which is
 * always initialized to 100.0.
 *
 * @param stdComponent instrumentation component name, or null for non-auto-instrumented spans
 * @param span the finished span to convert
 * @param inProc whether the dependency is in-process (typed "InProc")
 * @param telemetryItems output list that converted items are appended to
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
    List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    remoteDependencyData.setProperties(new HashMap<>());
    remoteDependencyData.setVersion(2);
    monitorBase.setBaseType("RemoteDependencyData");
    monitorBase.setBaseData(remoteDependencyData);
    addLinks(remoteDependencyData.getProperties(), span.getLinks());
    remoteDependencyData.setName(span.getName());
    Attributes attributes = span.getAttributes();
    if (inProc) {
        remoteDependencyData.setType("InProc");
    } else {
        // Classify the dependency by the attributes/span name: HTTP, DB, Event Hubs, Kafka, JMS.
        if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
            applyHttpRequestSpan(attributes, remoteDependencyData);
        } else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
            applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
        } else if (span.getName().equals("EventHubs.send")) {
            remoteDependencyData.setType("Microsoft.EventHub");
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            remoteDependencyData.setTarget(peerAddress + "/" + destination);
        } else if (span.getName().equals("EventHubs.message")) {
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            if (peerAddress != null) {
                remoteDependencyData.setTarget(peerAddress + "/" + destination);
            }
            remoteDependencyData.setType("Microsoft.EventHub");
        } else if ("kafka-clients".equals(stdComponent)) {
            remoteDependencyData.setType("Kafka");
            remoteDependencyData.setTarget(span.getName());
        } else if ("jms".equals(stdComponent)) {
            remoteDependencyData.setType("JMS");
            remoteDependencyData.setTarget(span.getName());
        }
    }
    remoteDependencyData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    String parentSpanId = span.getParentSpanId();
    if (span.getParentSpanContext().isValid()) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
    }
    telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
    remoteDependencyData
        .setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
    remoteDependencyData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        remoteDependencyData.getProperties().put("statusDescription", description);
    }
    Double samplingPercentage = 100.0;
    // Extra attributes are only forwarded for spans not produced by a known instrumentation.
    if (stdComponent == null) {
        addExtraAttributes(remoteDependencyData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates dependency type, data and target for a database-client span. All relational
 * db.system values collapse to the single "SQL" type; JDBC targets are prefixed "jdbc:".
 *
 * @param attributes span attributes carrying the db.* semantic conventions
 * @param rd dependency telemetry to populate
 * @param component instrumentation component name (e.g. "jdbc"), may be null
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
    String dependencyType = SQL_DB_SYSTEMS.contains(dbSystem) ? "SQL" : dbSystem;
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection info — fall back to the dependency type as the target.
        rd.setTarget(dependencyType);
        return;
    }
    String target = connectionString;
    String dbName = attributes.get(SemanticAttributes.DB_NAME);
    if (dbName != null) {
        target = target + " | " + dbName;
    }
    rd.setTarget("jdbc".equals(component) ? "jdbc:" + target : target);
}
/**
 * Populates dependency type, result code, target and name for an HTTP-client span.
 * The dependency name is "METHOD /path"; the target is host[:non-default-port].
 *
 * Fix: the URISyntaxException is now passed to the logger so the stack trace is
 * recorded instead of being silently reduced to its message.
 *
 * @param attributes span attributes carrying the http.* semantic conventions
 * @param remoteDependencyData dependency telemetry to populate
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    if (url != null) {
        try {
            URI uriObject = new URI(url);
            String target = createTarget(uriObject);
            remoteDependencyData.setTarget(target);
            String path = uriObject.getPath();
            if (CoreUtils.isNullOrEmpty(path)) {
                remoteDependencyData.setName(method + " /");
            } else {
                remoteDependencyData.setName(method + " " + path);
            }
        } catch (URISyntaxException e) {
            // Pass the throwable so ClientLogger records the stack trace, not just the message.
            logger.error(e.getMessage(), e);
        }
    }
}
/**
 * Converts each OpenTelemetry span event into an Application Insights "Event" telemetry item,
 * except exception events: only the FIRST exception event on the span is converted (into
 * exception telemetry via {@link #trackException}); later exception events are dropped.
 *
 * @param span the span whose events are exported
 * @param samplingPercentage sample rate stamped on every emitted item (must be non-null)
 * @param telemetryItems output list that emitted items are appended to
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        // Build the standard telemetry envelope for this event.
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        // Correlate the event with its owning trace/span.
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Exception event: emit exception telemetry instead of an Event item,
            // and only for the first exception observed on this span.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            telemetryItem.setSampleRate(samplingPercentage.floatValue());
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Emits one Application Insights exception telemetry item parsed from a raw stack-trace string.
 *
 * @param errorStack raw exception stack trace (first line is parsed for type/message)
 * @param span the span the exception belongs to (supplies the end timestamp)
 * @param operationId trace id used for correlation
 * @param id span id recorded as the operation parent
 * @param samplingPercentage sample rate stamped on the item (must be non-null)
 * @param telemetryItems output list the item is appended to
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    TelemetryExceptionData exceptionData = new TelemetryExceptionData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Exception");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    exceptionData.setProperties(new HashMap<>());
    exceptionData.setVersion(2);
    monitorBase.setBaseType("ExceptionData");
    monitorBase.setBaseData(exceptionData);
    // Correlate with the owning trace and span.
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    exceptionData.setExceptions(minimalParse(errorStack));
    telemetryItems.add(telemetryItem);
}
/**
 * Formats a duration as "days.hours:minutes:seconds.millis".
 *
 * Bug fix: {@link Duration#toHours()}, {@link Duration#toMinutes()}, {@link Duration#getSeconds()}
 * and {@link Duration#toMillis()} all return the TOTAL duration in that unit, so the previous
 * implementation repeated the whole duration in every field (e.g. 25h rendered as
 * "1.25:1500:90000.90000000"). Each field must be reduced modulo its parent unit.
 *
 * @param duration the elapsed span duration (non-negative)
 * @return the formatted duration string
 */
private static String getFormattedDuration(Duration duration) {
    return duration.toDays() + "." + duration.toHours() % 24 + ":" + duration.toMinutes() % 60 + ":"
        + duration.getSeconds() % 60 + "." + duration.toMillis() % 1000;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC,
 * truncated to millisecond precision.
 *
 * @param epochNanos nanoseconds since the Unix epoch
 * @return ISO-8601 formatted timestamp (UTC offset rendered as "Z")
 */
private static String getFormattedTime(long epochNanos) {
    return Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos))
        .atOffset(ZoneOffset.UTC)
        .format(DateTimeFormatter.ISO_DATE_TIME);
}
/**
 * Serializes span links as a compact JSON array of {operation_Id, id} objects and stores it
 * under the "_MS.links" property. Does nothing when there are no links.
 *
 * @param properties telemetry property map to write into
 * @param links span links to serialize
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder sb = new StringBuilder();
    sb.append("[");
    boolean first = true;
    for (LinkData link : links) {
        if (!first) {
            sb.append(",");
        }
        sb.append("{\"operation_Id\":\"");
        sb.append(link.getSpanContext().getTraceIdAsHexString());
        sb.append("\",\"id\":\"");
        sb.append(link.getSpanContext().getSpanIdAsHexString());
        sb.append("\"}");
        first = false;
    }
    sb.append("]");
    properties.put("_MS.links", sb.toString());
}
/**
 * Reads the named attribute through a String-typed key.
 *
 * @param attributes attribute bag to read from
 * @param attributeName name of the attribute
 * @return the attribute's value when it is a String, otherwise {@code null}
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
    if (attributeValue == null) {
        return null;
    } else if (attributeValue instanceof String) {
        return attributeValue.toString();
    } else {
        // Defensive: a string-keyed lookup should not return a non-String value.
        return null;
    }
}
/**
 * Reads the named attribute as a Double.
 *
 * Bug fix: the previous implementation looked the attribute up with
 * {@code AttributeKey.stringKey}, and a String-typed lookup can never yield a Double —
 * so the method (and therefore the "ai.sampling.percentage" read) always returned null.
 * The lookup must use a Double-typed key.
 *
 * @param attributes attribute bag to read from
 * @param attributeName name of the attribute
 * @return the attribute's value when it is a Double, otherwise {@code null}
 */
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
    Object attributeValue = attributes.get(AttributeKey.doubleKey(attributeName));
    if (attributeValue instanceof Double) {
        return (Double) attributeValue;
    }
    return null;
}
/**
 * Builds a dependency target of the form "host" or "host:port", omitting the port when it is
 * unset (-1) or a default HTTP(S) port (80/443).
 *
 * @param uriObject parsed request URI
 * @return target string for dependency telemetry
 */
private static String createTarget(URI uriObject) {
    String target = uriObject.getHost();
    if (uriObject.getPort() != 80 && uriObject.getPort() != 443 && uriObject.getPort() != -1) {
        target += ":" + uriObject.getPort();
    }
    return target;
}
/**
 * Converts an attribute value to its string form: scalar attributes go through
 * {@code String.valueOf}, array-typed attributes are joined with ", ",
 * anything else yields {@code null}.
 *
 * @param attributeKey key describing the attribute's declared type
 * @param value the attribute's value
 * @return string rendering of the value, or {@code null} for unknown types
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        default:
            return null;
    }
}
/**
 * Joins list elements with ", ". A null or empty list yields the empty string.
 *
 * @param values elements to join (may be null or empty)
 * @return comma-separated rendering of the elements
 */
private static <T> String join(List<T> values) {
    StringBuilder sb = new StringBuilder();
    if (CoreUtils.isNullOrEmpty(values)) {
        return sb.toString();
    }
    // Append all but the last element with a trailing separator.
    for (int i = 0; i < values.size() - 1; i++) {
        sb.append(values.get(i));
        sb.append(", ");
    }
    sb.append(values.get(values.size() - 1));
    return sb.toString();
}
// Reads the "ai.sampling.percentage" attribute used to stamp the telemetry sample rate.
// NOTE(review): removeAttributeDouble currently performs the lookup with a String-typed
// key, so this may always return null — verify against the AttributeKey type used.
private static Double removeAiSamplingPercentage(Attributes attributes) {
    return removeAttributeDouble(attributes, "ai.sampling.percentage");
}
/**
 * Copies every attribute with a representable string value into the telemetry property map,
 * keyed by the attribute's name. Attributes of unknown type are skipped.
 *
 * @param properties telemetry property map to populate
 * @param attributes source attribute bag
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String val = getStringValue(key, value);
        if (val != null) {
            properties.put(key.toString(), val);
        }
    });
}
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
if (!span.getParentSpanContext().isValid()) {
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
TelemetryExceptionDetails details = new TelemetryExceptionDetails();
String line = errorStack.split("\n")[0];
int index = line.indexOf(": ");
if (index != -1) {
details.setTypeName(line.substring(0, index));
details.setMessage(line.substring(index + 2));
} else {
details.setTypeName(line);
}
details.setStack(errorStack);
return Arrays.asList(details);
}
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates a database dependency from span attributes: all recognized SQL engines are
 * normalized to the single "SQL" type, the statement becomes the data, and the target is
 * built from the connection string (plus database name, with a "jdbc:" scheme for JDBC).
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
        String component) {
    String dependencyType = attributes.get(SemanticAttributes.DB_SYSTEM);
    if (SQL_DB_SYSTEMS.contains(dependencyType)) {
        dependencyType = "SQL";
    }
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));

    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection details available; fall back to the dependency type as the target.
        rd.setTarget(dependencyType);
        return;
    }
    String databaseName = attributes.get(SemanticAttributes.DB_NAME);
    if (databaseName != null) {
        connectionString += " | " + databaseName;
    }
    // JDBC targets carry an explicit scheme prefix.
    rd.setTarget("jdbc".equals(component) ? "jdbc:" + connectionString : connectionString);
}
/**
 * Populates an outgoing-HTTP dependency from span attributes: result code from the HTTP
 * status, target host[:port] from the URL, and name of the form "METHOD /path".
 */
private void applyHttpRequestSpan(Attributes attributes,
        RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    if (url != null) {
        try {
            URI uriObject = new URI(url);
            String target = createTarget(uriObject);
            remoteDependencyData.setTarget(target);
            String path = uriObject.getPath();
            // An empty path still yields a usable name of the form "METHOD /".
            if (CoreUtils.isNullOrEmpty(path)) {
                remoteDependencyData.setName(method + " /");
            } else {
                remoteDependencyData.setName(method + " " + path);
            }
        } catch (URISyntaxException e) {
            // Malformed URL: keep the partially populated dependency and just log.
            logger.error(e.getMessage());
        }
    }
}
/**
 * Converts span events into Event telemetry items. Events carrying exception.type or
 * exception.message are not emitted as Event items; instead the first such event with a
 * stack trace is forwarded to trackException, and later exception events are dropped.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        // Event items are stamped with the event's own timestamp, not the span's.
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Only the first exception event is tracked; an event without a stack trace
            // still suppresses later exception events.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            telemetryItem.setSampleRate(samplingPercentage.floatValue());
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Emits an Exception telemetry item parsed from a raw stack-trace string, tied to the
 * given operation/parent ids and stamped with the span's end time.
 */
private void trackException(String errorStack, SpanData span, String operationId,
        String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    // Build the exception payload first, then wrap it in the telemetry envelope.
    TelemetryExceptionData data = new TelemetryExceptionData();
    data.setProperties(new HashMap<>());
    data.setVersion(2);
    data.setExceptions(minimalParse(errorStack));

    MonitorBase base = new MonitorBase();
    base.setBaseType("ExceptionData");
    base.setBaseData(data);

    TelemetryItem item = new TelemetryItem();
    item.setTags(new HashMap<>());
    item.setName(telemetryItemNamePrefix + "Exception");
    item.setVersion(1);
    item.setInstrumentationKey(instrumentationKey);
    item.setData(base);
    item.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    item.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    item.setTime(getFormattedTime(span.getEndEpochNanos()));
    item.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(item);
}
/**
 * Formats a duration as {@code days.hours:minutes:seconds.millis}.
 *
 * <p>Bug fix: the previous implementation concatenated {@link Duration#toHours()},
 * {@link Duration#toMinutes()} etc., which return cumulative totals for the whole
 * duration rather than the component within the next-larger unit, producing malformed
 * values such as {@code 0.1:90:5400.5400000} for 90 minutes.
 *
 * @param duration the span duration; assumed non-negative.
 * @return the formatted duration string.
 */
private static String getFormattedDuration(Duration duration) {
    long days = duration.toDays();
    long hours = duration.toHours() % 24;      // hours within the current day
    long minutes = duration.toMinutes() % 60;  // minutes within the current hour
    long seconds = duration.getSeconds() % 60; // seconds within the current minute
    long millis = duration.toMillis() % 1000;  // milliseconds within the current second
    return days + "." + hours + ":" + minutes + ":" + seconds + "." + millis;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC,
 * truncated to millisecond precision.
 */
private static String getFormattedTime(long epochNanos) {
    Instant instant = Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos));
    return DateTimeFormatter.ISO_DATE_TIME.format(instant.atOffset(ZoneOffset.UTC));
}
/**
 * Serializes span links as a compact JSON array under the well-known "_MS.links"
 * property; does nothing when there are no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        json.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    json.append("]");
    properties.put("_MS.links", json.toString());
}
/**
 * Returns the named attribute as a String, or null when it is absent
 * (or not a String under the String-typed key).
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    return value instanceof String ? value.toString() : null;
}
/**
 * Builds a dependency target of the form host[:port], appending the port only when it
 * is explicit and not a default HTTP/HTTPS port (80/443; -1 means unspecified).
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    String target = uriObject.getHost();
    if (port != 80 && port != 443 && port != -1) {
        target += ":" + port;
    }
    return target;
}
/**
 * Renders an attribute value as a String based on its declared key type: scalar types
 * use String.valueOf, array types are joined with ", ", and unknown types map to null.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        default:
            // Unrecognized attribute types are skipped rather than serialized incorrectly.
            return null;
    }
}
/**
 * Joins the elements' string representations with ", "; returns an empty string for a
 * null or empty list.
 */
private static <T> String join(List<T> values) {
    StringBuilder sb = new StringBuilder();
    if (CoreUtils.isNullOrEmpty(values)) {
        return sb.toString();
    }
    String separator = "";
    for (T value : values) {
        sb.append(separator).append(value);
        separator = ", ";
    }
    return sb.toString();
}
/**
 * Copies every attribute whose value has a printable representation into the given
 * property bag, keyed by the attribute name.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String printable = getStringValue(key, value);
        if (printable != null) {
            properties.put(key.toString(), printable);
        }
    });
}
} |
Yeah, this was required because the response code is a required field when sending to the service. | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = removeAiSamplingPercentage(attributes);
samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | requestData.setResponseCode("200"); | private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RequestData requestData = new RequestData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Request");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
requestData.setProperties(new HashMap<>());
requestData.setVersion(2);
monitorBase.setBaseType("RequestData");
monitorBase.setBaseData(requestData);
Attributes attributes = span.getAttributes();
if ("kafka-clients".equals(stdComponent)) {
requestData.setSource(span.getName());
} else if ("jms".equals(stdComponent)) {
requestData.setSource(span.getName());
}
addLinks(requestData.getProperties(), span.getLinks());
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
requestData.setResponseCode("200");
if (httpStatusCode != null) {
requestData.setResponseCode(Long.toString(httpStatusCode));
}
String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
if (httpUrl != null) {
requestData.setUrl(httpUrl);
}
String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
String name = span.getName();
if (httpMethod != null && name.startsWith("/")) {
name = httpMethod + " " + name;
}
requestData.setName(name);
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
if (span.getName().equals("EventHubs.process")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
requestData.setSource(peerAddress + "/" + destination);
}
requestData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
if (aiLegacyParentId != null) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
if (aiLegacyOperationId != null) {
telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
}
} else {
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
}
long startEpochNanos = span.getStartEpochNanos();
telemetryItem.setTime(getFormattedTime(startEpochNanos));
Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
requestData.setDuration(getFormattedDuration(duration));
requestData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
requestData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(requestData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
} | class AzureMonitorExporter implements SpanExporter {
    // Extracts the short component name ("jdbc", "jms", "kafka-clients", ...) from
    // auto-instrumentation library names of the form "io.opentelemetry.auto.<name>-<version>".
    private static final Pattern COMPONENT_PATTERN = Pattern
        .compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
    // db.system values that are normalized to the generic "SQL" dependency type.
    private static final Set<String> SQL_DB_SYSTEMS;
    static {
        Set<String> dbSystems = new HashSet<>();
        dbSystems.add("db2");
        dbSystems.add("derby");
        dbSystems.add("mariadb");
        dbSystems.add("mssql");
        dbSystems.add("mysql");
        dbSystems.add("oracle");
        dbSystems.add("postgresql");
        dbSystems.add("sqlite");
        dbSystems.add("other_sql");
        dbSystems.add("hsqldb");
        dbSystems.add("h2");
        SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
    }
    // Client used to transmit the converted telemetry items.
    private final MonitorExporterAsyncClient client;
    private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
    // Application Insights instrumentation key stamped on every telemetry item.
    private final String instrumentationKey;
    // Item-name prefix "Microsoft.ApplicationInsights.<ikey-without-dashes>.".
    private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
    this.client = client;
    this.instrumentationKey = instrumentationKey;
    // Telemetry item names embed the instrumentation key with its dashes stripped.
    String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
    this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
    try {
        List<TelemetryItem> telemetryItems = new ArrayList<>();
        for (SpanData span : spans) {
            logger.verbose("exporting span: {}", span);
            export(span, telemetryItems);
        }
        // Fire-and-forget: the async send is subscribed with tracing disabled so this
        // internal call does not itself produce spans; success is reported immediately,
        // before the transmission completes.
        client.export(telemetryItems)
            .subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
            .subscribe();
        return CompletableResultCode.ofSuccess();
    } catch (Throwable t) {
        // Broad catch: an exporter must never throw back into the SDK's export pipeline.
        logger.error(t.getMessage(), t);
        return CompletableResultCode.ofFailure();
    }
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
    // No-op: this exporter buffers nothing itself, so there is nothing to flush.
    return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
    // No-op: no background resources are owned by this exporter.
    return CompletableResultCode.ofSuccess();
}
/**
 * Routes a finished span to the request or remote-dependency converter based on its
 * kind; JMS client spans without a valid parent are suppressed entirely.
 */
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
    Span.Kind kind = span.getKind();
    String instrumentationName = span.getInstrumentationLibraryInfo().getName();
    Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
    // e.g. "io.opentelemetry.auto.jdbc-1.0" -> "jdbc"; null for other instrumentation.
    String stdComponent = matcher.matches() ? matcher.group(1) : null;
    if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
        return;
    }
    if (kind == Span.Kind.INTERNAL) {
        if (!span.getParentSpanContext().isValid()) {
            // A root internal span is reported as a request.
            exportRequest(stdComponent, span, telemetryItems);
        } else if (span.getName().equals("EventHubs.message")) {
            // Event Hubs message spans are reported as out-of-process dependencies.
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
        } else {
            exportRemoteDependency(stdComponent, span, true, telemetryItems);
        }
    } else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
        exportRemoteDependency(stdComponent, span, false, telemetryItems);
    } else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
        exportRequest(stdComponent, span, telemetryItems);
    } else {
        throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
    }
}
/**
 * Builds a single {@link TelemetryExceptionDetails} entry from a raw stack-trace string.
 * Only the first line is parsed: the text before ": " becomes the type name and the
 * remainder the message; the full stack is attached verbatim.
 *
 * <p>Bug fix: splits on {@code \r?\n} instead of {@code \n} so a trailing carriage
 * return from Windows-style line endings no longer leaks into the type name or message.
 *
 * @param errorStack the raw stack trace text, never null.
 * @return a singleton list with the parsed exception details.
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    String line = errorStack.split("\r?\n")[0];
    int index = line.indexOf(": ");
    if (index != -1) {
        details.setTypeName(line.substring(0, index));
        details.setMessage(line.substring(index + 2));
    } else {
        // No ": " separator — treat the whole first line as the type name.
        details.setTypeName(line);
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Converts a span into a RemoteDependencyData telemetry item and appends it (plus any
 * span events) to {@code telemetryItems}.
 *
 * <p>Cleanup: removed a dead {@code samplingPercentage != null} guard (the value is a
 * constant 100.0 here) and a statement whose result was discarded
 * ({@code span.getInstrumentationLibraryInfo().getName();} — a getter call with no
 * visible side effect).
 *
 * @param stdComponent short auto-instrumentation component name, may be null.
 * @param span the finished span to convert.
 * @param inProc whether the dependency is in-process (internal span with a valid parent).
 * @param telemetryItems output list receiving the converted items.
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
        List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    remoteDependencyData.setProperties(new HashMap<>());
    remoteDependencyData.setVersion(2);
    monitorBase.setBaseType("RemoteDependencyData");
    monitorBase.setBaseData(remoteDependencyData);
    addLinks(remoteDependencyData.getProperties(), span.getLinks());
    remoteDependencyData.setName(span.getName());
    Attributes attributes = span.getAttributes();
    if (inProc) {
        remoteDependencyData.setType("InProc");
    } else {
        // Classify the dependency by its attributes first, then by well-known span names.
        if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
            applyHttpRequestSpan(attributes, remoteDependencyData);
        } else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
            applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
        } else if (span.getName().equals("EventHubs.send")) {
            remoteDependencyData.setType("Microsoft.EventHub");
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            remoteDependencyData.setTarget(peerAddress + "/" + destination);
        } else if (span.getName().equals("EventHubs.message")) {
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            if (peerAddress != null) {
                remoteDependencyData.setTarget(peerAddress + "/" + destination);
            }
            remoteDependencyData.setType("Microsoft.EventHub");
        } else if ("kafka-clients".equals(stdComponent)) {
            remoteDependencyData.setType("Kafka");
            remoteDependencyData.setTarget(span.getName());
        } else if ("jms".equals(stdComponent)) {
            remoteDependencyData.setType("JMS");
            remoteDependencyData.setTarget(span.getName());
        }
    }
    remoteDependencyData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    String parentSpanId = span.getParentSpanId();
    if (span.getParentSpanContext().isValid()) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
    }
    telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
    remoteDependencyData
        .setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
    remoteDependencyData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        remoteDependencyData.getProperties().put("statusDescription", description);
    }
    // Sampling is fixed at 100% for dependencies in this code path.
    Double samplingPercentage = 100.0;
    if (stdComponent == null) {
        addExtraAttributes(remoteDependencyData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates a database dependency from span attributes: recognized SQL engines are
 * normalized to type "SQL", the statement becomes the data, and the target is built
 * from the connection string (plus database name, "jdbc:"-prefixed for JDBC).
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String type = attributes.get(SemanticAttributes.DB_SYSTEM);
    if (SQL_DB_SYSTEMS.contains(type)) {
        type = "SQL";
    }
    rd.setType(type);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
    String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (dbUrl == null) {
        // No connection details; fall back to the dependency type as the target.
        rd.setTarget(type);
    } else {
        String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
        if (dbInstance != null) {
            dbUrl += " | " + dbInstance;
        }
        if ("jdbc".equals(component)) {
            rd.setTarget("jdbc:" + dbUrl);
        } else {
            rd.setTarget(dbUrl);
        }
    }
}
/**
 * Populates an outgoing-HTTP dependency from span attributes: result code from the HTTP
 * status, target host[:port] from the URL, and name of the form "METHOD /path".
 */
private void applyHttpRequestSpan(Attributes attributes,
        RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    Long statusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (statusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(statusCode));
    }
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    if (url == null) {
        return;
    }
    try {
        URI parsedUrl = new URI(url);
        remoteDependencyData.setTarget(createTarget(parsedUrl));
        // An empty path still yields a usable name of the form "METHOD /".
        String path = parsedUrl.getPath();
        String displayPath = CoreUtils.isNullOrEmpty(path) ? "/" : path;
        remoteDependencyData.setName(method + " " + displayPath);
    } catch (URISyntaxException e) {
        // Malformed URL: keep the partially populated dependency and just log.
        logger.error(e.getMessage());
    }
}
/**
 * Converts span events into Event telemetry items. Events carrying exception.type or
 * exception.message are not emitted as Event items; instead the first such event with a
 * stack trace is forwarded to trackException, and later exception events are dropped.
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        // Event items carry the event's own timestamp, not the span's.
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Only the first exception event is tracked; an event without a stack trace
            // still suppresses later exception events.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            telemetryItem.setSampleRate(samplingPercentage.floatValue());
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Emits an Exception telemetry item parsed from a raw stack-trace string, tied to the
 * given operation/parent ids and stamped with the span's end time.
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    TelemetryExceptionData exceptionData = new TelemetryExceptionData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Exception");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    exceptionData.setProperties(new HashMap<>());
    exceptionData.setVersion(2);
    monitorBase.setBaseType("ExceptionData");
    monitorBase.setBaseData(exceptionData);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    // Only the first line of the stack is parsed into type/message; see minimalParse.
    exceptionData.setExceptions(minimalParse(errorStack));
    telemetryItems.add(telemetryItem);
}
/**
 * Formats a duration as {@code days.hours:minutes:seconds.millis}.
 *
 * <p>Bug fix: the previous implementation concatenated {@link Duration#toHours()},
 * {@link Duration#toMinutes()} etc., which return cumulative totals for the whole
 * duration rather than the component within the next-larger unit, producing malformed
 * values such as {@code 0.1:90:5400.5400000} for 90 minutes.
 *
 * @param duration the span duration; assumed non-negative.
 * @return the formatted duration string.
 */
private static String getFormattedDuration(Duration duration) {
    long days = duration.toDays();
    long hours = duration.toHours() % 24;      // hours within the current day
    long minutes = duration.toMinutes() % 60;  // minutes within the current hour
    long seconds = duration.getSeconds() % 60; // seconds within the current minute
    long millis = duration.toMillis() % 1000;  // milliseconds within the current second
    return days + "." + hours + ":" + minutes + ":" + seconds + "." + millis;
}
/**
 * Renders an epoch-nanosecond timestamp as an ISO-8601 date-time in UTC,
 * truncated to millisecond precision.
 */
private static String getFormattedTime(long epochNanos) {
    Instant instant = Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos));
    return DateTimeFormatter.ISO_DATE_TIME.format(instant.atOffset(ZoneOffset.UTC));
}
/**
 * Serializes span links as a compact JSON array under the well-known "_MS.links"
 * property; does nothing when there are no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder sb = new StringBuilder();
    sb.append("[");
    boolean first = true;
    for (LinkData link : links) {
        if (!first) {
            sb.append(",");
        }
        sb.append("{\"operation_Id\":\"");
        sb.append(link.getSpanContext().getTraceIdAsHexString());
        sb.append("\",\"id\":\"");
        sb.append(link.getSpanContext().getSpanIdAsHexString());
        sb.append("\"}");
        first = false;
    }
    sb.append("]");
    properties.put("_MS.links", sb.toString());
}
/**
 * Returns the named attribute as a String, or null when it is absent.
 * NOTE(review): the lookup uses a String-typed key, so the value here should only ever
 * be a String or null — the final else branch looks unreachable; confirm against the
 * Attributes API before simplifying.
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
    if (attributeValue == null) {
        return null;
    } else if (attributeValue instanceof String) {
        return attributeValue.toString();
    } else {
        return null;
    }
}
/**
 * Returns the named attribute as a Double, or null when absent.
 *
 * <p>Bug fix: the lookup previously used {@code AttributeKey.stringKey(attributeName)},
 * whose typed {@code Attributes.get} can only yield a String, so the
 * {@code instanceof Double} branch never matched and this method always returned null
 * (silently dropping values such as "ai.sampling.percentage"). It now looks the value
 * up under a double-typed key.
 *
 * @param attributes the span attributes.
 * @param attributeName the attribute name to read.
 * @return the double value, or null when not present as a double.
 */
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
    Object attributeValue = attributes.get(AttributeKey.doubleKey(attributeName));
    if (attributeValue instanceof Double) {
        return (Double) attributeValue;
    }
    return null;
}
/**
 * Builds a dependency target of the form host[:port], appending the port only when it
 * is explicit and not a default HTTP/HTTPS port (80/443; -1 means unspecified).
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    String target = uriObject.getHost();
    if (port != 80 && port != 443 && port != -1) {
        target += ":" + port;
    }
    return target;
}
/**
 * Renders an attribute value as a String based on its declared key type: scalar types
 * use String.valueOf, array types are joined with ", ", and unknown types map to null.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        default:
            // Unrecognized attribute types are skipped rather than serialized incorrectly.
            return null;
    }
}
/**
 * Joins the elements' string representations with ", "; returns an empty string for a
 * null or empty list.
 */
private static <T> String join(List<T> values) {
    StringBuilder sb = new StringBuilder();
    if (CoreUtils.isNullOrEmpty(values)) {
        return sb.toString();
    }
    String separator = "";
    for (T value : values) {
        sb.append(separator).append(value);
        separator = ", ";
    }
    return sb.toString();
}
// Reads the "ai.sampling.percentage" attribute as a Double (null when absent).
private static Double removeAiSamplingPercentage(Attributes attributes) {
    return removeAttributeDouble(attributes, "ai.sampling.percentage");
}
/**
 * Copies every attribute whose value has a printable representation into the given
 * property bag, keyed by the attribute name.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String val = getStringValue(key, value);
        if (val != null) {
            properties.put(key.toString(), val);
        }
    });
}
} | class AzureMonitorExporter implements SpanExporter {
    // Extracts the short component name ("jdbc", "jms", "kafka-clients", ...) from
    // auto-instrumentation library names of the form "io.opentelemetry.auto.<name>-<version>".
    private static final Pattern COMPONENT_PATTERN = Pattern
        .compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
    // db.system values that are normalized to the generic "SQL" dependency type.
    private static final Set<String> SQL_DB_SYSTEMS;
    static {
        Set<String> dbSystems = new HashSet<>();
        dbSystems.add("db2");
        dbSystems.add("derby");
        dbSystems.add("mariadb");
        dbSystems.add("mssql");
        dbSystems.add("mysql");
        dbSystems.add("oracle");
        dbSystems.add("postgresql");
        dbSystems.add("sqlite");
        dbSystems.add("other_sql");
        dbSystems.add("hsqldb");
        dbSystems.add("h2");
        SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
    }
    // Client used to transmit the converted telemetry items.
    private final MonitorExporterAsyncClient client;
    private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
    // Application Insights instrumentation key stamped on every telemetry item.
    private final String instrumentationKey;
    // Item-name prefix "Microsoft.ApplicationInsights.<ikey-without-dashes>.".
    private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
    this.client = client;
    this.instrumentationKey = instrumentationKey;
    // Telemetry item names embed the instrumentation key with its dashes stripped.
    this.telemetryItemNamePrefix =
        "Microsoft.ApplicationInsights." + instrumentationKey.replaceAll("-", "") + ".";
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode export(Collection<SpanData> spans) {
    try {
        List<TelemetryItem> telemetryItems = new ArrayList<>();
        for (SpanData span : spans) {
            logger.verbose("exporting span: {}", span);
            export(span, telemetryItems);
        }
        // Fire-and-forget: the async send is subscribed with tracing disabled so this
        // internal call does not itself produce spans; success is reported immediately,
        // before the transmission completes.
        client.export(telemetryItems)
            .subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
            .subscribe();
        return CompletableResultCode.ofSuccess();
    } catch (Throwable t) {
        // Broad catch: an exporter must never throw back into the SDK's export pipeline.
        logger.error(t.getMessage(), t);
        return CompletableResultCode.ofFailure();
    }
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
    // No-op: this exporter buffers nothing itself, so there is nothing to flush.
    return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
    // No-op: no background resources are owned by this exporter.
    return CompletableResultCode.ofSuccess();
}
/**
 * Routes a finished span to the request or remote-dependency converter based on its
 * kind; JMS client spans without a valid parent are suppressed entirely.
 */
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
    Span.Kind kind = span.getKind();
    String instrumentationName = span.getInstrumentationLibraryInfo().getName();
    Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
    // e.g. "io.opentelemetry.auto.jdbc-1.0" -> "jdbc"; null for other instrumentation.
    String stdComponent = matcher.matches() ? matcher.group(1) : null;
    if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
        return;
    }
    if (kind == Span.Kind.INTERNAL) {
        if (!span.getParentSpanContext().isValid()) {
            // A root internal span is reported as a request.
            exportRequest(stdComponent, span, telemetryItems);
        } else if (span.getName().equals("EventHubs.message")) {
            // Event Hubs message spans are reported as out-of-process dependencies.
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
        } else {
            exportRemoteDependency(stdComponent, span, true, telemetryItems);
        }
    } else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
        exportRemoteDependency(stdComponent, span, false, telemetryItems);
    } else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
        exportRequest(stdComponent, span, telemetryItems);
    } else {
        throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
    }
}
/**
 * Builds a single {@link TelemetryExceptionDetails} entry from a raw stack-trace string.
 * Only the first line is parsed: the text before ": " becomes the type name and the
 * remainder the message; the full stack is attached verbatim.
 *
 * <p>Bug fix: splits on {@code \r?\n} instead of {@code \n} so a trailing carriage
 * return from Windows-style line endings no longer leaks into the type name or message.
 *
 * @param errorStack the raw stack trace text, never null.
 * @return a singleton list with the parsed exception details.
 */
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
    TelemetryExceptionDetails details = new TelemetryExceptionDetails();
    String line = errorStack.split("\r?\n")[0];
    int index = line.indexOf(": ");
    if (index != -1) {
        details.setTypeName(line.substring(0, index));
        details.setMessage(line.substring(index + 2));
    } else {
        // No ": " separator — treat the whole first line as the type name.
        details.setTypeName(line);
    }
    details.setStack(errorStack);
    return Arrays.asList(details);
}
/**
 * Converts a client/producer/internal span into a RemoteDependency telemetry item and
 * appends it (plus telemetry for the span's events) to {@code telemetryItems}.
 *
 * @param stdComponent standard instrumentation component name, or null when the
 *     instrumentation library name did not match the expected pattern
 * @param span the completed span to convert
 * @param inProc true when the dependency represents an in-process call
 * @param telemetryItems output list that converted telemetry is appended to
 */
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
    List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    remoteDependencyData.setProperties(new HashMap<>());
    remoteDependencyData.setVersion(2);
    monitorBase.setBaseType("RemoteDependencyData");
    monitorBase.setBaseData(remoteDependencyData);
    addLinks(remoteDependencyData.getProperties(), span.getLinks());
    remoteDependencyData.setName(span.getName());
    // NOTE: removed a no-op statement that called
    // span.getInstrumentationLibraryInfo().getName() and discarded the result.
    Attributes attributes = span.getAttributes();
    if (inProc) {
        remoteDependencyData.setType("InProc");
    } else {
        if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
            applyHttpRequestSpan(attributes, remoteDependencyData);
        } else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
            applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
        } else if (span.getName().equals("EventHubs.send")) {
            // Event Hubs send: target is "<peer>/<destination>".
            remoteDependencyData.setType("Microsoft.EventHub");
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            remoteDependencyData.setTarget(peerAddress + "/" + destination);
        } else if (span.getName().equals("EventHubs.message")) {
            // Event Hubs message creation: target only set when the peer is known.
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes,
                SemanticAttributes.MESSAGING_DESTINATION.getKey());
            if (peerAddress != null) {
                remoteDependencyData.setTarget(peerAddress + "/" + destination);
            }
            remoteDependencyData.setType("Microsoft.EventHub");
        } else if ("kafka-clients".equals(stdComponent)) {
            remoteDependencyData.setType("Kafka");
            remoteDependencyData.setTarget(span.getName());
        } else if ("jms".equals(stdComponent)) {
            remoteDependencyData.setType("JMS");
            remoteDependencyData.setTarget(span.getName());
        }
    }
    remoteDependencyData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    String parentSpanId = span.getParentSpanId();
    if (span.getParentSpanContext().isValid()) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
    }
    telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
    remoteDependencyData
        .setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
    remoteDependencyData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        remoteDependencyData.getProperties().put("statusDescription", description);
    }
    // Sampling is not applied here; the rate is always reported as 100%.
    Double samplingPercentage = 100.0;
    if (stdComponent == null) {
        // Arbitrary attributes are only copied for unrecognized instrumentation.
        addExtraAttributes(remoteDependencyData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Populates a remote-dependency telemetry from a database client span: dependency type
 * (collapsed to "SQL" for known SQL systems), the statement text, and a target derived
 * from the connection string plus database name.
 */
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
    String component) {
    String dbSystem = attributes.get(SemanticAttributes.DB_SYSTEM);
    String dependencyType = SQL_DB_SYSTEMS.contains(dbSystem) ? "SQL" : dbSystem;
    rd.setType(dependencyType);
    rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
    String connectionString = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
    if (connectionString == null) {
        // No connection string available: fall back to the dependency type.
        rd.setTarget(dependencyType);
        return;
    }
    String target = connectionString;
    String databaseName = attributes.get(SemanticAttributes.DB_NAME);
    if (databaseName != null) {
        target += " | " + databaseName;
    }
    rd.setTarget("jdbc".equals(component) ? "jdbc:" + target : target);
}
/**
 * Populates a remote-dependency telemetry from an HTTP client span: the result code from
 * the status-code attribute, and target/name derived from the request URL when it parses.
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    Long statusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (statusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(statusCode));
    }
    String requestUrl = attributes.get(SemanticAttributes.HTTP_URL);
    if (requestUrl == null) {
        return;
    }
    String httpMethod = attributes.get(SemanticAttributes.HTTP_METHOD);
    try {
        URI parsedUrl = new URI(requestUrl);
        remoteDependencyData.setTarget(createTarget(parsedUrl));
        String urlPath = parsedUrl.getPath();
        if (CoreUtils.isNullOrEmpty(urlPath)) {
            remoteDependencyData.setName(httpMethod + " /");
        } else {
            remoteDependencyData.setName(httpMethod + " " + urlPath);
        }
    } catch (URISyntaxException e) {
        // Unparseable URL: leave target/name unset.
        logger.error(e.getMessage());
    }
}
// Converts each span event into an Event telemetry item. Events carrying exception
// attributes are NOT exported as events; instead the FIRST such event's stack trace
// (when present) is exported as Exception telemetry, and later exception events are
// dropped entirely.
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
boolean foundException = false;
for (EventData event : span.getEvents()) {
TelemetryItem telemetryItem = new TelemetryItem();
TelemetryEventData eventData = new TelemetryEventData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "Event");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
eventData.setProperties(new HashMap<>());
eventData.setVersion(2);
monitorBase.setBaseType("EventData");
monitorBase.setBaseData(eventData);
eventData.setName(event.getName());
String operationId = span.getTraceId();
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
telemetryItem.getTags()
.put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
addExtraAttributes(eventData.getProperties(), event.getAttributes());
if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
|| event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
// Exception-flavored event: the Event telemetry built above is intentionally
// discarded; only the first stack trace becomes Exception telemetry.
if (!foundException) {
Object stacktrace = event.getAttributes()
.get(SemanticAttributes.EXCEPTION_STACKTRACE);
if (stacktrace != null) {
trackException(stacktrace.toString(), span, operationId,
span.getSpanId(), samplingPercentage, telemetryItems);
}
}
foundException = true;
} else {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
}
}
}
/**
 * Emits an Exception telemetry item parsed from the given stack trace, correlated to the
 * operation identified by {@code operationId} with {@code id} as its parent.
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    TelemetryExceptionData exceptionData = new TelemetryExceptionData();
    exceptionData.setProperties(new HashMap<>());
    exceptionData.setVersion(2);
    exceptionData.setExceptions(minimalParse(errorStack));

    MonitorBase payload = new MonitorBase();
    payload.setBaseType("ExceptionData");
    payload.setBaseData(exceptionData);

    TelemetryItem item = new TelemetryItem();
    item.setTags(new HashMap<>());
    item.setName(telemetryItemNamePrefix + "Exception");
    item.setVersion(1);
    item.setInstrumentationKey(instrumentationKey);
    item.setData(payload);
    item.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    item.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    item.setTime(getFormattedTime(span.getEndEpochNanos()));
    item.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(item);
}
/**
 * Formats a duration as "d.hh:mm:ss.SSS" (day count, then hour/minute/second/millisecond
 * components).
 *
 * <p>Bug fix: the previous implementation concatenated the cumulative totals returned by
 * {@link Duration#toHours()}, {@link Duration#toMinutes()}, {@link Duration#getSeconds()}
 * and {@link Duration#toMillis()}, so one hour and one second rendered as
 * "0.1:60:3601.3601000". Each field is now reduced to its component remainder and
 * zero-padded.
 */
private static String getFormattedDuration(Duration duration) {
    return String.format("%d.%02d:%02d:%02d.%03d",
        duration.toDays(),
        duration.toHours() % 24,
        duration.toMinutes() % 60,
        duration.getSeconds() % 60,
        duration.toMillis() % 1000);
}
/**
 * Formats an epoch-nanosecond timestamp as an ISO-8601 date-time string in UTC.
 * Precision below one millisecond is discarded by the nanos-to-millis conversion.
 */
private static String getFormattedTime(long epochNanos) {
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return DateTimeFormatter.ISO_DATE_TIME.format(Instant.ofEpochMilli(epochMillis).atOffset(ZoneOffset.UTC));
}
/**
 * Serializes span links as a compact JSON array of {operation_Id, id} pairs and stores it
 * under the "_MS.links" property; does nothing when there are no links.
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder json = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        json.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    json.append("]");
    properties.put("_MS.links", json.toString());
}
/**
 * Reads a string-typed attribute by name. Returns null when the attribute is absent or
 * its value is not a String. NOTE(review): despite the name, this does not remove the
 * attribute from the collection.
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    return (value instanceof String) ? (String) value : null;
}
/**
 * Derives the dependency target from a URI: the host, plus ":port" only when an explicit
 * non-default port (anything other than 80, 443, or absent) is present.
 */
private static String createTarget(URI uriObject) {
    String host = uriObject.getHost();
    int port = uriObject.getPort();
    boolean defaultOrAbsentPort = port == 80 || port == 443 || port == -1;
    return defaultOrAbsentPort ? host : host + ":" + port;
}
/**
 * Renders an attribute value as a string: scalar attribute types via String.valueOf,
 * array types as a ", "-joined list, and null for any other attribute type.
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            return join((List<?>) value);
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            return null;
    }
}
/**
 * Joins list elements with ", ". Returns the empty string for a null or empty list.
 */
private static <T> String join(List<T> values) {
    StringBuilder joined = new StringBuilder();
    if (CoreUtils.isNullOrEmpty(values)) {
        return joined.toString();
    }
    String separator = "";
    for (T value : values) {
        joined.append(separator).append(value);
        separator = ", ";
    }
    return joined.toString();
}
/**
 * Copies every attribute whose value can be rendered as a string into the properties map,
 * keyed by the attribute key's string form; attributes that render to null are skipped.
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((attributeKey, attributeValue) -> {
        String rendered = getStringValue(attributeKey, attributeValue);
        if (rendered == null) {
            return;
        }
        properties.put(attributeKey.toString(), rendered);
    });
}
} |
There are 2 `attr5`s added to this map. The double array will overwrite the string value. | public void addEventWithAttributes() {
// Given: an event payload covering each supported attribute value type; the null value
// ("attr6") is expected to be dropped during conversion.
final String eventName = "event-0";
// Explicit type arguments: an anonymous subclass cannot use the diamond before Java 9,
// and this matches the style used elsewhere in this class.
Map<String, Object> input = new HashMap<String, Object>() {{
        put("attr1", "value1");
        put("attr2", true);
        put("attr3", 1L);
        put("attr4", 1.0);
        put("attr5", new double[] {1.0, 2.0, 3.0});
        put("attr6", null);
    }};
openTelemetryTracer.addEvent(eventName, input, null);
// Then: exactly one event is recorded on the current span.
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(1, eventData.size());
assertEquals(eventName, eventData.get(0).getName());
Attributes attributes = eventData.get(0).getAttributes();
// attr6 (null value) is not converted, hence size - 1.
assertEquals(input.size() - 1, attributes.size());
Attributes expectedEventAttrs = Attributes.builder()
    // Bug fix: this entry was keyed "attr5", which the double[] entry below then
    // overwrote in the builder, so "attr1" was never verified.
    .put("attr1", "value1")
    .put("attr2", true)
    .put("attr3", 1L)
    .put("attr4", 1.0)
    .put("attr5", new double[] {1.0, 2.0, 3.0})
    .build();
expectedEventAttrs.forEach((attributeKey, attrValue) -> assertEquals(attrValue, attributes.get(attributeKey)));
} | .put("attr5", new double[] {1.0, 2.0, 3.0}) | public void addEventWithAttributes() {
// Given: an event payload covering each supported attribute value type; the null value
// ("attr6") is expected to be dropped during conversion.
final String eventName = "event-0";
Map<String, Object> input = new HashMap<String, Object>() {{
put("attr1", "value1");
put("attr2", true);
put("attr3", 1L);
put("attr4", 1.0);
put("attr5", new double[] {1.0, 2.0, 3.0});
put("attr6", null);
}};
openTelemetryTracer.addEvent(eventName, input, null);
// Then: exactly one event is recorded on the current span.
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(1, eventData.size());
assertEquals(eventName, eventData.get(0).getName());
Attributes attributes = eventData.get(0).getAttributes();
// attr6 (null value) is not converted, hence size - 1.
assertEquals(input.size() - 1, attributes.size());
Attributes expectedEventAttrs = Attributes.builder()
.put("attr1", "value1")
.put("attr2", true)
.put("attr3", 1L)
.put("attr4", 1.0)
.put("attr5", new double[] {1.0, 2.0, 3.0})
.build();
// Every expected attribute must round-trip through the recorded event.
expectedEventAttrs.forEach((attributeKey, attrValue) -> assertEquals(attrValue, attributes.get(attributeKey)));
} | class OpenTelemetryTracerTest {
// Span/operation name used throughout the tests; follows the Event Hubs "send" convention.
private static final String METHOD_NAME = "EventHubs.send";
private static final String HOSTNAME_VALUE = "testEventDataNameSpace.servicebus.windows.net";
private static final String ENTITY_PATH_VALUE = "test";
private static final String AZ_NAMESPACE_VALUE = "Microsoft.Eventhub";
private static final Long MESSAGE_ENQUEUED_VALUE = Instant.ofEpochSecond(561639205).getEpochSecond();
// Tracer under test plus OpenTelemetry SDK fixtures, recreated for every test in setUp().
private OpenTelemetryTracer openTelemetryTracer;
private Tracer tracer;
private Context tracingContext;
private Span parentSpan;
private Scope scope;
// Attributes an exported span is expected to carry; some tests add entries to this map.
private HashMap<String, Object> expectedAttributeMap = new HashMap<String, Object>() {
{
put(MESSAGING_DESTINATION.getKey(), ENTITY_PATH_VALUE);
put(PEER_SERVICE.getKey(), HOSTNAME_VALUE);
put(AZ_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
}
};
// Creates a fresh tracer, starts a parent span, makes it current, and wraps it in a Context.
@BeforeEach
public void setUp() {
openTelemetryTracer = new OpenTelemetryTracer();
tracer = OpenTelemetrySdk.builder().build().getTracer("TracerSdkTest");
parentSpan = tracer.spanBuilder(PARENT_SPAN_KEY).startSpan();
scope = parentSpan.makeCurrent();
tracingContext = new Context(PARENT_SPAN_KEY, parentSpan);
}
// Closes the current scope and clears the fixtures.
// NOTE(review): the assertNull calls only re-check the nulls assigned two lines above,
// so they can never fail — consider removing them.
@AfterEach
public void tearDown() {
scope.close();
tracer = null;
tracingContext = null;
assertNull(tracer);
assertNull(tracingContext);
}
// start(String, Context) must reject a null context.
@Test
public void startSpanNullPointerException() {
assertThrows(NullPointerException.class, () -> openTelemetryTracer.start("", null));
}
// A span started from a context holding a parent span must be INTERNAL, parented to it,
// and must carry the az.namespace attribute from the context.
@Test
public void startSpanParentContextFlowTest() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME,
tracingContext.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE));
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.INTERNAL, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
// NOTE(review): arguments are (actual, expected) here, reversed from the JUnit
// (expected, actual) convention used elsewhere in this class.
assertEquals(attributeMap.get(AttributeKey.stringKey(AZ_NAMESPACE_KEY)), AZ_NAMESPACE_VALUE);
}
// Starting from an empty Context must still produce a local (non-remote) span.
@Test
public void startSpanTestNoUserParent() {
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, Context.NONE);
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertFalse(recordEventsSpan.getSpanContext().isRemote());
assertNotNull(recordEventsSpan.toSpanData().getParentSpanId());
}
// ProcessKind.SEND must produce a CLIENT span carrying the expected messaging attributes.
@Test
public void startSpanProcessKindSend() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final SpanBuilder spanBuilder = tracer.spanBuilder(METHOD_NAME);
final Context traceContext = tracingContext
.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)
.addData(HOST_NAME_KEY, HOSTNAME_VALUE)
.addData(SPAN_BUILDER_KEY, spanBuilder)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.SEND);
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.CLIENT, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
// ProcessKind.MESSAGE must produce a PRODUCER span and publish span-context/diagnostic-id
// entries in the returned context.
@Test
public void startSpanProcessKindMessage() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, tracingContext, ProcessKind.MESSAGE);
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.PRODUCER, recordEventsSpan.toSpanData().getKind());
assertNotNull(updatedContext.getData(SPAN_CONTEXT_KEY).get());
assertNotNull(updatedContext.getData(DIAGNOSTIC_ID_KEY).get());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
// ProcessKind.PROCESS must produce a CONSUMER span, open a scope, and carry the
// enqueued-time attribute.
@Test
public void startSpanProcessKindProcess() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context traceContext = tracingContext
.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)
.addData(HOST_NAME_KEY, HOSTNAME_VALUE)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(MESSAGE_ENQUEUED_TIME, MESSAGE_ENQUEUED_VALUE);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.PROCESS);
assertFalse(tracingContext.getData(SPAN_CONTEXT_KEY).isPresent(),
"When no parent span passed in context information");
assertSpanWithExplicitParent(updatedContext, parentSpanId);
assertNotNull(updatedContext.getData(SCOPE_KEY).get());
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.CONSUMER, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
expectedAttributeMap.put(MESSAGE_ENQUEUED_TIME, MESSAGE_ENQUEUED_VALUE);
expectedAttributeMap.put(AZ_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
// getSharedSpanBuilder must stash a span builder in the returned context.
@Test
public void getSpanBuilderTest() {
final Context updatedContext = openTelemetryTracer.getSharedSpanBuilder(METHOD_NAME, Context.NONE);
assertTrue(updatedContext.getData(SPAN_BUILDER_KEY).isPresent());
}
// A PROCESS span started from a remote parent span context must link to that remote parent.
@Test
public void startProcessSpanWithRemoteParent() {
final Span testSpan = tracer.spanBuilder("child-span").startSpan();
final String testSpanId = testSpan.getSpanContext().getSpanIdAsHexString();
final SpanContext spanContext = SpanContext.createFromRemoteParent(
testSpan.getSpanContext().getTraceIdAsHexString(),
testSpan.getSpanContext().getSpanIdAsHexString(),
testSpan.getSpanContext().getTraceFlags(),
testSpan.getSpanContext().getTraceState());
final Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, spanContext);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.PROCESS);
assertNotNull(updatedContext.getData(SCOPE_KEY).get());
assertSpanWithRemoteParent(updatedContext, testSpanId);
}
// The three-argument start overload must reject a null ProcessKind.
@Test
public void startSpanOverloadNullPointerException() {
assertThrows(NullPointerException.class, () ->
openTelemetryTracer.start("", Context.NONE, null));
}
// addLink must attach the span context found in the context to the shared span builder.
@Test
public void addLinkTest() {
SpanBuilder span = tracer.spanBuilder("parent-span");
Span toLinkSpan = tracer.spanBuilder("new test span").startSpan();
Context spanContext = new Context(
SPAN_CONTEXT_KEY, toLinkSpan.getSpanContext());
LinkData expectedLink = LinkData.create(toLinkSpan.getSpanContext());
openTelemetryTracer.addLink(spanContext.addData(SPAN_BUILDER_KEY, span));
ReadableSpan span1 = (ReadableSpan) span.startSpan();
LinkData createdLink = span1.toSpanData().getLinks().get(0);
assertEquals(1, span1.toSpanData().getLinks().size());
assertEquals(expectedLink.getSpanContext().getTraceIdAsHexString(),
createdLink.getSpanContext().getTraceIdAsHexString());
assertEquals(expectedLink.getSpanContext().getSpanIdAsHexString(),
createdLink.getSpanContext().getSpanIdAsHexString());
}
/**
 * addLink with no span context in the context must leave the started span link-free.
 */
@Test
public void addLinkNoSpanContextTest() {
    SpanBuilder span = tracer.spanBuilder("parent-span");
    openTelemetryTracer.addLink(new Context(SPAN_BUILDER_KEY, span));
    ReadableSpan span1 = (ReadableSpan) span.startSpan();
    // JUnit assertEquals takes (expected, actual); the previous order was reversed.
    assertEquals(0, span1.toSpanData().getLinks().size());
}
/**
 * addLink on an empty context must be a no-op: the started span carries no links.
 */
@Test
public void addLinkNoSpanToLinkTest() {
    SpanBuilder span = tracer.spanBuilder("parent-span");
    openTelemetryTracer.addLink(Context.NONE);
    ReadableSpan span1 = (ReadableSpan) span.startSpan();
    // JUnit assertEquals takes (expected, actual); the previous order was reversed.
    assertEquals(0, span1.toSpanData().getLinks().size());
}
// end() with neither a status code nor an error must leave the span status UNSET.
@Test
public void endSpanNoSuccessErrorMessageTest() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
openTelemetryTracer.end(null, null, tracingContext);
assertEquals(UNSET, recordEventsSpan.toSpanData().getStatus().getStatusCode());
}
// end() with a throwable must set ERROR status and record an "exception" event carrying
// the throwable's message.
@Test
public void endSpanErrorMessageTest() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
final String throwableMessage = "custom error message";
openTelemetryTracer.end(null, new Throwable(throwableMessage), tracingContext);
assertEquals(StatusCode.ERROR, recordEventsSpan.toSpanData().getStatus().getStatusCode());
List<EventData> events = recordEventsSpan.toSpanData().getEvents();
assertEquals(1, events.size());
EventData event = events.get(0);
assertEquals("exception", event.getName());
assertEquals("custom error message", event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE));
}
// end() with an HTTP status code and a throwable must set ERROR with the HTTP reason
// phrase as the description, and still record the exception event.
@Test
public void endSpanTestThrowableResponseCode() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
openTelemetryTracer.end(404, new Throwable("this is an exception"), tracingContext);
assertEquals(StatusCode.ERROR, recordEventsSpan.toSpanData().getStatus().getStatusCode());
assertEquals("Not Found", recordEventsSpan.toSpanData().getStatus().getDescription());
List<EventData> events = recordEventsSpan.toSpanData().getEvents();
assertEquals(1, events.size());
EventData event = events.get(0);
assertEquals("exception", event.getName());
}
// setAttribute against a context carrying a span must record the attribute on that span.
@Test
public void setAttributeTest() {
final String firstKey = "first-key";
final String firstKeyValue = "first-value";
Context spanContext = openTelemetryTracer.start(METHOD_NAME, tracingContext);
final ReadableSpan recordEventsSpan = (ReadableSpan) spanContext.getData(PARENT_SPAN_KEY).get();
openTelemetryTracer.setAttribute(firstKey, firstKeyValue, spanContext);
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
// NOTE(review): arguments are (actual, expected), reversed from the JUnit convention.
assertEquals(attributeMap.get(AttributeKey.stringKey(firstKey)), firstKeyValue);
}
/**
 * setAttribute against a context without a span must be a no-op: the attribute must not
 * appear on the span started from the original context.
 */
@Test
public void setAttributeNoSpanTest() {
    final String firstKey = "first-key";
    final String firstKeyValue = "first-value";
    Context spanContext = openTelemetryTracer.start(METHOD_NAME, tracingContext);
    final ReadableSpan recordEventsSpan = (ReadableSpan) spanContext.getData(PARENT_SPAN_KEY).get();
    openTelemetryTracer.setAttribute(firstKey, firstKeyValue, Context.NONE);
    final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
    // JUnit assertEquals takes (expected, actual); the previous order was reversed.
    assertEquals(0, attributeMap.size());
}
// setSpanName must stash the user-provided span name in the returned context.
@Test
public void setSpanNameTest() {
Context initialContext = Context.NONE;
final String spanName = "child-span";
Context updatedContext = openTelemetryTracer.setSpanName(spanName, initialContext);
assertEquals(updatedContext.getData(USER_SPAN_NAME_KEY).get(), spanName);
}
// A well-formed W3C traceparent string must be parsed into the matching span context.
@Test
public void extractContextValidDiagnosticId() {
String diagnosticId = "00-bc7293302f5dc6de8a2372491092df95-dfd6fee494751d3f-01";
SpanContext validSpanContext = SpanContext.create(
TraceId.bytesToHex(TraceId.bytesFromHex(diagnosticId, 3)),
SpanId.bytesToHex(SpanId.bytesFromHex(diagnosticId, 36)),
TraceFlags.byteFromHex(diagnosticId, 53),
TraceState.builder().build());
Context updatedContext = openTelemetryTracer.extractContext(diagnosticId, Context.NONE);
Optional<Object> spanContextOptional = updatedContext.getData(SPAN_CONTEXT_KEY);
assertNotNull(spanContextOptional);
SpanContext spanContext = (SpanContext) spanContextOptional.get();
// NOTE(review): arguments are (actual, expected), reversed from the JUnit convention.
assertEquals(spanContext, validSpanContext);
}
// A malformed diagnostic id must yield the invalid span context rather than throwing.
@Test
public void extractContextInvalidDiagnosticId() {
String diagnosticId = "00000000000000000000000000000000";
SpanContext invalidSpanContext = SpanContext.create(
TraceId.getInvalid(),
SpanId.getInvalid(),
TraceFlags.getDefault(),
TraceState.getDefault()
);
Context updatedContext = openTelemetryTracer.extractContext(diagnosticId, Context.NONE);
Optional<Object> spanContextOptional = updatedContext.getData(SPAN_CONTEXT_KEY);
assertNotNull(spanContextOptional);
SpanContext spanContext = (SpanContext) spanContextOptional.get();
// NOTE(review): arguments are (actual, expected), reversed from the JUnit convention.
assertEquals(spanContext, invalidSpanContext);
}
// addEvent without attributes or timestamp must record a single named event.
@Test
public void addEventWithNonNullEventName() {
final String eventName = "event-0";
openTelemetryTracer.addEvent(eventName, null, null);
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(1, eventData.size());
assertEquals(eventName, eventData.get(0).getName());
}
/**
 * An explicit event timestamp must be honored: the recorded event's epoch time
 * round-trips (at millisecond precision) to the OffsetDateTime passed to addEvent.
 *
 * <p>Bug fix: the {@code @Test} annotation was duplicated; {@code @Test} is not a
 * repeatable annotation, so the duplicate did not compile.
 */
@Test
public void addEventWithTimeSpecification() {
    final String eventName = "event-0";
    OffsetDateTime eventTime = OffsetDateTime.parse("2021-01-01T18:35:24.00Z");
    openTelemetryTracer.addEvent(eventName, null, eventTime);
    final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
    List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
    assertNotNull(eventData);
    assertEquals(1, eventData.size());
    assertEquals(eventName, eventData.get(0).getName());
    assertEquals(eventTime,
        OffsetDateTime.ofInstant(Instant.ofEpochMilli(eventData.get(0).getEpochNanos() / 1000000), ZoneOffset.UTC));
}
// Events added after the span has ended must be dropped.
@Test
public void addEventAfterSpanEnd() {
final String eventName = "event-0";
parentSpan.end();
openTelemetryTracer.addEvent(eventName, null, null);
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(0, eventData.size());
}
// Asserts the context holds a readable span named METHOD_NAME whose parent is the given
// local (non-remote) span id.
private static void assertSpanWithExplicitParent(Context updatedContext, String parentSpanId) {
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY).get());
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertFalse(recordEventsSpan.toSpanData().getParentSpanContext().isRemote());
assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());
}
// Asserts the context holds a CONSUMER span parented to the given REMOTE span id.
private static void assertSpanWithRemoteParent(Context updatedContext, String parentSpanId) {
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY).get());
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertEquals(Span.Kind.CONSUMER, recordEventsSpan.toSpanData().getKind());
assertTrue(recordEventsSpan.toSpanData().getParentSpanContext().isRemote());
assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());
}
// Checks that every attribute actually present matches the expected map.
// NOTE(review): this iterates only the ACTUAL attributes, so expected entries missing
// from the span go undetected — consider also asserting the sizes match.
private static void verifySpanAttributes(Map<String, Object> expectedMap, Attributes actualAttributeMap) {
actualAttributeMap.forEach((attributeKey, attributeValue) ->
assertEquals(expectedMap.get(attributeKey.getKey()), attributeValue));
}
} | class OpenTelemetryTracerTest {
private static final String METHOD_NAME = "EventHubs.send";
private static final String HOSTNAME_VALUE = "testEventDataNameSpace.servicebus.windows.net";
private static final String ENTITY_PATH_VALUE = "test";
private static final String AZ_NAMESPACE_VALUE = "Microsoft.Eventhub";
private static final Long MESSAGE_ENQUEUED_VALUE = Instant.ofEpochSecond(561639205).getEpochSecond();
private OpenTelemetryTracer openTelemetryTracer;
private Tracer tracer;
private Context tracingContext;
private Span parentSpan;
private Scope scope;
private HashMap<String, Object> expectedAttributeMap = new HashMap<String, Object>() {
{
put(MESSAGING_DESTINATION.getKey(), ENTITY_PATH_VALUE);
put(PEER_SERVICE.getKey(), HOSTNAME_VALUE);
put(AZ_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
}
};
@BeforeEach
public void setUp() {
openTelemetryTracer = new OpenTelemetryTracer();
tracer = OpenTelemetrySdk.builder().build().getTracer("TracerSdkTest");
parentSpan = tracer.spanBuilder(PARENT_SPAN_KEY).startSpan();
scope = parentSpan.makeCurrent();
tracingContext = new Context(PARENT_SPAN_KEY, parentSpan);
}
@AfterEach
public void tearDown() {
scope.close();
tracer = null;
tracingContext = null;
assertNull(tracer);
assertNull(tracingContext);
}
@Test
public void startSpanNullPointerException() {
assertThrows(NullPointerException.class, () -> openTelemetryTracer.start("", null));
}
@Test
public void startSpanParentContextFlowTest() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME,
tracingContext.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE));
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.INTERNAL, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
assertEquals(attributeMap.get(AttributeKey.stringKey(AZ_NAMESPACE_KEY)), AZ_NAMESPACE_VALUE);
}
@Test
public void startSpanTestNoUserParent() {
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, Context.NONE);
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY));
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertFalse(recordEventsSpan.getSpanContext().isRemote());
assertNotNull(recordEventsSpan.toSpanData().getParentSpanId());
}
@Test
public void startSpanProcessKindSend() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final SpanBuilder spanBuilder = tracer.spanBuilder(METHOD_NAME);
final Context traceContext = tracingContext
.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)
.addData(HOST_NAME_KEY, HOSTNAME_VALUE)
.addData(SPAN_BUILDER_KEY, spanBuilder)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.SEND);
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.CLIENT, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
@Test
public void startSpanProcessKindMessage() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, tracingContext, ProcessKind.MESSAGE);
assertSpanWithExplicitParent(updatedContext, parentSpanId);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.PRODUCER, recordEventsSpan.toSpanData().getKind());
assertNotNull(updatedContext.getData(SPAN_CONTEXT_KEY).get());
assertNotNull(updatedContext.getData(DIAGNOSTIC_ID_KEY).get());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
@Test
public void startSpanProcessKindProcess() {
final String parentSpanId = parentSpan.getSpanContext().getSpanIdAsHexString();
final Context traceContext = tracingContext
.addData(ENTITY_PATH_KEY, ENTITY_PATH_VALUE)
.addData(HOST_NAME_KEY, HOSTNAME_VALUE)
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(MESSAGE_ENQUEUED_TIME, MESSAGE_ENQUEUED_VALUE);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.PROCESS);
assertFalse(tracingContext.getData(SPAN_CONTEXT_KEY).isPresent(),
"When no parent span passed in context information");
assertSpanWithExplicitParent(updatedContext, parentSpanId);
assertNotNull(updatedContext.getData(SCOPE_KEY).get());
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(Span.Kind.CONSUMER, recordEventsSpan.toSpanData().getKind());
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
expectedAttributeMap.put(MESSAGE_ENQUEUED_TIME, MESSAGE_ENQUEUED_VALUE);
expectedAttributeMap.put(AZ_NAMESPACE_KEY, AZ_NAMESPACE_VALUE);
verifySpanAttributes(expectedAttributeMap, attributeMap);
}
@Test
public void getSpanBuilderTest() {
final Context updatedContext = openTelemetryTracer.getSharedSpanBuilder(METHOD_NAME, Context.NONE);
assertTrue(updatedContext.getData(SPAN_BUILDER_KEY).isPresent());
}
@Test
public void startProcessSpanWithRemoteParent() {
final Span testSpan = tracer.spanBuilder("child-span").startSpan();
final String testSpanId = testSpan.getSpanContext().getSpanIdAsHexString();
final SpanContext spanContext = SpanContext.createFromRemoteParent(
testSpan.getSpanContext().getTraceIdAsHexString(),
testSpan.getSpanContext().getSpanIdAsHexString(),
testSpan.getSpanContext().getTraceFlags(),
testSpan.getSpanContext().getTraceState());
final Context traceContext = tracingContext.addData(SPAN_CONTEXT_KEY, spanContext);
final Context updatedContext = openTelemetryTracer.start(METHOD_NAME, traceContext, ProcessKind.PROCESS);
assertNotNull(updatedContext.getData(SCOPE_KEY).get());
assertSpanWithRemoteParent(updatedContext, testSpanId);
}
@Test
public void startSpanOverloadNullPointerException() {
assertThrows(NullPointerException.class, () ->
openTelemetryTracer.start("", Context.NONE, null));
}
@Test
public void addLinkTest() {
SpanBuilder span = tracer.spanBuilder("parent-span");
Span toLinkSpan = tracer.spanBuilder("new test span").startSpan();
Context spanContext = new Context(
SPAN_CONTEXT_KEY, toLinkSpan.getSpanContext());
LinkData expectedLink = LinkData.create(toLinkSpan.getSpanContext());
openTelemetryTracer.addLink(spanContext.addData(SPAN_BUILDER_KEY, span));
ReadableSpan span1 = (ReadableSpan) span.startSpan();
LinkData createdLink = span1.toSpanData().getLinks().get(0);
assertEquals(1, span1.toSpanData().getLinks().size());
assertEquals(expectedLink.getSpanContext().getTraceIdAsHexString(),
createdLink.getSpanContext().getTraceIdAsHexString());
assertEquals(expectedLink.getSpanContext().getSpanIdAsHexString(),
createdLink.getSpanContext().getSpanIdAsHexString());
}
@Test
public void addLinkNoSpanContextTest() {
SpanBuilder span = tracer.spanBuilder("parent-span");
openTelemetryTracer.addLink(new Context(SPAN_BUILDER_KEY, span));
ReadableSpan span1 = (ReadableSpan) span.startSpan();
assertEquals(span1.toSpanData().getLinks().size(), 0);
}
@Test
public void addLinkNoSpanToLinkTest() {
SpanBuilder span = tracer.spanBuilder("parent-span");
openTelemetryTracer.addLink(Context.NONE);
ReadableSpan span1 = (ReadableSpan) span.startSpan();
assertEquals(span1.toSpanData().getLinks().size(), 0);
}
@Test
public void endSpanNoSuccessErrorMessageTest() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
openTelemetryTracer.end(null, null, tracingContext);
assertEquals(UNSET, recordEventsSpan.toSpanData().getStatus().getStatusCode());
}
@Test
public void endSpanErrorMessageTest() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
final String throwableMessage = "custom error message";
openTelemetryTracer.end(null, new Throwable(throwableMessage), tracingContext);
assertEquals(StatusCode.ERROR, recordEventsSpan.toSpanData().getStatus().getStatusCode());
List<EventData> events = recordEventsSpan.toSpanData().getEvents();
assertEquals(1, events.size());
EventData event = events.get(0);
assertEquals("exception", event.getName());
assertEquals("custom error message", event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE));
}
@Test
public void endSpanTestThrowableResponseCode() {
final ReadableSpan recordEventsSpan = (ReadableSpan) Span.current();
openTelemetryTracer.end(404, new Throwable("this is an exception"), tracingContext);
assertEquals(StatusCode.ERROR, recordEventsSpan.toSpanData().getStatus().getStatusCode());
assertEquals("Not Found", recordEventsSpan.toSpanData().getStatus().getDescription());
List<EventData> events = recordEventsSpan.toSpanData().getEvents();
assertEquals(1, events.size());
EventData event = events.get(0);
assertEquals("exception", event.getName());
}
@Test
public void setAttributeTest() {
final String firstKey = "first-key";
final String firstKeyValue = "first-value";
Context spanContext = openTelemetryTracer.start(METHOD_NAME, tracingContext);
final ReadableSpan recordEventsSpan = (ReadableSpan) spanContext.getData(PARENT_SPAN_KEY).get();
openTelemetryTracer.setAttribute(firstKey, firstKeyValue, spanContext);
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
assertEquals(attributeMap.get(AttributeKey.stringKey(firstKey)), firstKeyValue);
}
@Test
public void setAttributeNoSpanTest() {
final String firstKey = "first-key";
final String firstKeyValue = "first-value";
Context spanContext = openTelemetryTracer.start(METHOD_NAME, tracingContext);
final ReadableSpan recordEventsSpan = (ReadableSpan) spanContext.getData(PARENT_SPAN_KEY).get();
openTelemetryTracer.setAttribute(firstKey, firstKeyValue, Context.NONE);
final Attributes attributeMap = recordEventsSpan.toSpanData().getAttributes();
assertEquals(attributeMap.size(), 0);
}
@Test
public void setSpanNameTest() {
Context initialContext = Context.NONE;
final String spanName = "child-span";
Context updatedContext = openTelemetryTracer.setSpanName(spanName, initialContext);
assertEquals(updatedContext.getData(USER_SPAN_NAME_KEY).get(), spanName);
}
@Test
public void extractContextValidDiagnosticId() {
String diagnosticId = "00-bc7293302f5dc6de8a2372491092df95-dfd6fee494751d3f-01";
SpanContext validSpanContext = SpanContext.create(
TraceId.bytesToHex(TraceId.bytesFromHex(diagnosticId, 3)),
SpanId.bytesToHex(SpanId.bytesFromHex(diagnosticId, 36)),
TraceFlags.byteFromHex(diagnosticId, 53),
TraceState.builder().build());
Context updatedContext = openTelemetryTracer.extractContext(diagnosticId, Context.NONE);
Optional<Object> spanContextOptional = updatedContext.getData(SPAN_CONTEXT_KEY);
assertNotNull(spanContextOptional);
SpanContext spanContext = (SpanContext) spanContextOptional.get();
assertEquals(spanContext, validSpanContext);
}
@Test
public void extractContextInvalidDiagnosticId() {
String diagnosticId = "00000000000000000000000000000000";
SpanContext invalidSpanContext = SpanContext.create(
TraceId.getInvalid(),
SpanId.getInvalid(),
TraceFlags.getDefault(),
TraceState.getDefault()
);
Context updatedContext = openTelemetryTracer.extractContext(diagnosticId, Context.NONE);
Optional<Object> spanContextOptional = updatedContext.getData(SPAN_CONTEXT_KEY);
assertNotNull(spanContextOptional);
SpanContext spanContext = (SpanContext) spanContextOptional.get();
assertEquals(spanContext, invalidSpanContext);
}
@Test
public void addEventWithNonNullEventName() {
final String eventName = "event-0";
openTelemetryTracer.addEvent(eventName, null, null);
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(1, eventData.size());
assertEquals(eventName, eventData.get(0).getName());
}
@Test
@Test
public void addEventWithTimeSpecification() {
final String eventName = "event-0";
OffsetDateTime eventTime = OffsetDateTime.parse("2021-01-01T18:35:24.00Z");
openTelemetryTracer.addEvent(eventName, null, eventTime);
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(1, eventData.size());
assertEquals(eventName, eventData.get(0).getName());
assertEquals(eventTime,
OffsetDateTime.ofInstant(Instant.ofEpochMilli(eventData.get(0).getEpochNanos() / 1000000), ZoneOffset.UTC));
}
@Test
public void addEventAfterSpanEnd() {
final String eventName = "event-0";
parentSpan.end();
openTelemetryTracer.addEvent(eventName, null, null);
final ReadableSpan recordEventsSpan = (ReadableSpan) tracingContext.getData(PARENT_SPAN_KEY).get();
List<EventData> eventData = recordEventsSpan.toSpanData().getEvents();
assertNotNull(eventData);
assertEquals(0, eventData.size());
}
private static void assertSpanWithExplicitParent(Context updatedContext, String parentSpanId) {
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY).get());
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertFalse(recordEventsSpan.toSpanData().getParentSpanContext().isRemote());
assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());
}
private static void assertSpanWithRemoteParent(Context updatedContext, String parentSpanId) {
assertNotNull(updatedContext.getData(PARENT_SPAN_KEY).get());
assertTrue(updatedContext.getData(PARENT_SPAN_KEY).get() instanceof ReadableSpan);
final ReadableSpan recordEventsSpan =
(ReadableSpan) updatedContext.getData(PARENT_SPAN_KEY).get();
assertEquals(METHOD_NAME, recordEventsSpan.getName());
assertEquals(Span.Kind.CONSUMER, recordEventsSpan.toSpanData().getKind());
assertTrue(recordEventsSpan.toSpanData().getParentSpanContext().isRemote());
assertEquals(parentSpanId, recordEventsSpan.toSpanData().getParentSpanId());
}
private static void verifySpanAttributes(Map<String, Object> expectedMap, Attributes actualAttributeMap) {
actualAttributeMap.forEach((attributeKey, attributeValue) ->
assertEquals(expectedMap.get(attributeKey.getKey()), attributeValue));
}
} |
Should we just turn this into a single `if/else` | private ConnectionOptions getConnectionOptions() {
configuration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration;
if (credentials == null) {
final String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(configuration);
}
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
final String hostname = customEndpointAddress != null ? customEndpointAddress.getHost() : null;
final Integer port = customEndpointAddress != null && customEndpointAddress.getPort() != -1
? customEndpointAddress.getPort()
: null;
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport, retryOptions,
proxyOptions, scheduler, options, verificationMode, hostname, port);
} | : null; | private ConnectionOptions getConnectionOptions() {
configuration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration;
if (credentials == null) {
final String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(configuration);
}
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
if (customEndpointAddress == null) {
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport,
retryOptions, proxyOptions, scheduler, options, verificationMode);
} else {
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport,
retryOptions, proxyOptions, scheduler, options, verificationMode, customEndpointAddress.getHost(),
customEndpointAddress.getPort());
}
} | class EventHubClientBuilder {
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private final ClientLogger logger = new ClientLogger(EventHubClientBuilder.class);
private final Object connectionLock = new Object();
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
* Creates a new instance with the default transport {@link AmqpTransportType
* non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
* created using the builder.
*/
public EventHubClientBuilder() {
transport = AmqpTransportType.AMQP;
}
/**
* Sets the credential information given a connection string to the Event Hub instance.
*
* <p>
* If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the
* desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal
* "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub".
* </p>
*
* <p>
* If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string
* from that Event Hub will result in a connection string that contains the name.
* </p>
*
* @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
* that the Event Hub name and the shared access key properties are contained in this connection string.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code
* connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString) {
ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential);
}
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
TokenCredential tokenCredential;
if (properties.getSharedAccessSignature() == null) {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
} else {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
}
return tokenCredential;
}
/**
* Sets the client options.
*
* @param clientOptions The client options.
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential information given a connection string to the Event Hubs namespace and name to a specific
* Event Hub instance.
*
* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
* expected that the shared access key properties are contained in this connection string, but not the Event Hub
* name.
* @param eventHubName The name of the Event Hub to connect the client to.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or,
* if the {@code connectionString} contains the Event Hub name.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (connectionString.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'connectionString' cannot be an empty string."));
} else if (eventHubName.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
if (!CoreUtils.isNullOrEmpty(properties.getEntityPath())
&& !eventHubName.equals(properties.getEntityPath())) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"'connectionString' contains an Event Hub name [%s] and it does not match the given "
+ "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
+ "Or supply a 'connectionString' without 'EntityPath' in it.",
properties.getEntityPath(), eventHubName)));
}
return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use
* {@link Configuration
*
* @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network
* does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through
* an intermediary. For example: {@literal https:
* <p>
* If no port is specified, the default port for the {@link
* used.
*
* @param customEndpointAddress The custom endpoint address.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
*/
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
if (customEndpointAddress == null) {
this.customEndpointAddress = null;
return this;
}
try {
this.customEndpointAddress = new URL(customEndpointAddress);
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(
new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
}
return this;
}
/**
* Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
* default, a new connection is constructed and used created for each Event Hub consumer or producer created.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder shareConnection() {
this.isSharedConnection.set(true);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
TokenCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
/**
* Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link
* AmqpTransportType
*
* @param proxyOptions The proxy configuration to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link
* AmqpTransportType
*
* @param transport The transport type to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder transportType(AmqpTransportType transport) {
this.transport = transport;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
* group. The name of the consumer group that is created by default is {@link
* "$Default"}.
*
* @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
* context of this group. The name of the consumer group that is created by default is {@link
*
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder consumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
return this;
}
/**
* Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
* and queue locally without regard to whether a receive operation is currently active.
*
* @param prefetchCount The amount of events to queue locally.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code prefetchCount} is less than {@link
* greater than {@link
*/
public EventHubClientBuilder prefetchCount(int prefetchCount) {
if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT)));
}
if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT)));
}
this.prefetchCount = prefetchCount;
return this;
}
/**
* Package-private method that sets the scheduler for the created Event Hub client.
*
* @param scheduler Scheduler to set.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder scheduler(Scheduler scheduler) {
this.scheduler = scheduler;
return this;
}
/**
* Package-private method that sets the verify mode for this connection.
*
* @param verifyMode The verification mode.
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
this.verifyMode = verifyMode;
return this;
}
/**
* Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
* buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
*
* @return A new {@link EventHubConsumerAsyncClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
if (CoreUtils.isNullOrEmpty(consumerGroup)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty "
+ "string. using EventHubClientBuilder.consumerGroup(String)"));
}
return buildAsyncClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
* buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
*
* @return A new {@link EventHubConsumerClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerClient buildConsumerClient() {
return buildClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
*
* @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerAsyncClient buildAsyncProducerClient() {
return buildAsyncClient().createProducer();
}
/**
* Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created.
*
* @return A new {@link EventHubProducerClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerClient buildProducerClient() {
return buildClient().createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubAsyncClient buildAsyncClient() {
if (retryOptions == null) {
retryOptions = DEFAULT_RETRY;
}
if (scheduler == null) {
scheduler = Schedulers.elastic();
}
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT;
}
final MessageSerializer messageSerializer = new EventHubMessageSerializer();
final EventHubConnectionProcessor processor;
if (isSharedConnection.get()) {
synchronized (connectionLock) {
if (eventHubConnectionProcessor == null) {
eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
}
}
processor = eventHubConnectionProcessor;
final int numberOfOpenClients = openClients.incrementAndGet();
logger.info("
} else {
processor = buildConnectionProcessor(messageSerializer);
}
final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
isSharedConnection.get(), this::onClientClose);
}
/**
* Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
* invoked, a new instance of {@link EventHubClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubClient buildClient() {
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
}
final EventHubAsyncClient client = buildAsyncClient();
return new EventHubClient(client, retryOptions);
}
void onClientClose() {
synchronized (connectionLock) {
final int numberOfOpenClients = openClients.decrementAndGet();
logger.info("Closing a dependent client.
if (numberOfOpenClients > 0) {
return;
}
if (numberOfOpenClients < 0) {
logger.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
}
logger.info("No more open clients, closing shared connection.");
if (eventHubConnectionProcessor != null) {
eventHubConnectionProcessor.dispose();
eventHubConnectionProcessor = null;
} else {
logger.warning("Shared EventHubConnectionProcessor was already disposed.");
}
}
}
/**
 * Builds a connection processor that lazily emits a single AMQP connection on demand.
 *
 * @param messageSerializer Serializer used by the connection to encode/decode AMQP messages.
 * @return A subscribed {@link EventHubConnectionProcessor} wrapping a one-connection {@code Flux}.
 */
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
final ConnectionOptions connectionOptions = getConnectionOptions();
final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE);
final ReactorProvider provider = new ReactorProvider();
final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
// Product name/version are read from the SDK's properties file; used for AMQP client identification.
final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
// The Flux creates a connection only when the processor requests one; exactly one request is honored.
final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
sink.onRequest(request -> {
if (request == 0) {
// No demand yet; nothing to emit.
return;
} else if (request > 1) {
// The processor is expected to request connections one at a time.
sink.error(logger.logExceptionAsWarning(new IllegalArgumentException(
"Requested more than one connection. Only emitting one. Request: " + request)));
return;
}
final String connectionId = StringUtil.getRandomString("MF");
logger.info("connectionId[{}]: Emitting a single connection.", connectionId);
final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
connectionOptions, eventHubName, provider, handlerProvider, tokenManagerProvider, messageSerializer,
product, clientVersion);
sink.next(connection);
});
});
// subscribeWith returns the processor itself, already wired to the connection source.
return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
connectionOptions.getFullyQualifiedNamespace(), eventHubName, connectionOptions.getRetry()));
}
/**
 * Resolves proxy options when the user has not supplied complete ones: keeps any explicitly-chosen
 * authentication type, reads the proxy address from configuration, and falls back to system defaults
 * when no address is configured.
 *
 * @param configuration Configuration store to read the HTTP proxy address from.
 * @return Proxy options derived from configuration, or {@link ProxyOptions#SYSTEM_DEFAULTS}.
 */
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
    // Preserve the authentication type from partially-configured proxy options, if any.
    final ProxyAuthenticationType authType = proxyOptions != null
        ? proxyOptions.getAuthentication()
        : ProxyAuthenticationType.NONE;

    final String address = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
    return CoreUtils.isNullOrEmpty(address)
        ? ProxyOptions.SYSTEM_DEFAULTS
        : getProxyOptions(authType, address);
}
/**
 * Parses a proxy address into {@link ProxyOptions}. A bare {@code host:port} string is handled directly;
 * anything else is delegated to azure-core's proxy-configuration parsing.
 *
 * @param authentication Authentication type to apply to the resulting options.
 * @param proxyAddress Proxy address, either {@code host:port} or a form azure-core understands.
 * @return Proxy options for the given address.
 */
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress) {
    if (!HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
        // Not a plain "host:port" string — let azure-core interpret the configured proxy.
        final com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
            .fromConfiguration(configuration);
        return new ProxyOptions(authentication,
            new Proxy(coreProxyOptions.getType().toProxyType(), coreProxyOptions.getAddress()),
            coreProxyOptions.getUsername(), coreProxyOptions.getPassword());
    }

    // "host:port" form: split once and build an HTTP proxy directly.
    final String[] parts = proxyAddress.split(":");
    final String proxyHost = parts[0];
    final int proxyPort = Integer.parseInt(parts[1]);
    final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort));
    return new ProxyOptions(authentication, proxy,
        configuration.get(ProxyOptions.PROXY_USERNAME),
        configuration.get(ProxyOptions.PROXY_PASSWORD));
}
} | class EventHubClientBuilder {
// Default prefetch for async consumers; sync clients use a prefetch of 1.
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
// Properties file shipped with the SDK; supplies product name and version for client identification.
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
// Environment variable consulted when no credentials are set explicitly.
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
// Matches a bare "host:port" proxy address. Compiled once, per best practice.
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private final ClientLogger logger = new ClientLogger(EventHubClientBuilder.class);
// Guards creation/disposal of the shared connection processor.
private final Object connectionLock = new Object();
// True once shareConnection() has been invoked.
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
// Shared connection processor; non-null only while shared-connection clients are open.
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
* Creates a new instance with the default transport {@link AmqpTransportType#AMQP} and a non-shared connection. A
* non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
* created using the builder.
*/
public EventHubClientBuilder() {
transport = AmqpTransportType.AMQP;
}
/**
 * Sets the credential information given a connection string to the Event Hub instance.
 *
 * <p>If the connection string is copied from the Event Hubs namespace, it will likely not contain the name of
 * the desired Event Hub, which is needed. In this case, the name can be added manually by appending
 * {@literal "EntityPath=EVENT_HUB_NAME"} to the connection string, for example "EntityPath=telemetry-hub".</p>
 *
 * <p>If a shared access policy is defined directly on the Event Hub itself, copying the connection string from
 * that Event Hub yields a connection string that already contains the name.</p>
 *
 * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
 *     that the Event Hub name and the shared access key properties are contained in this connection string.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code connectionString} is null or empty, or does not contain the
 *     "EntityPath" key naming the Event Hub instance.
 * @throws AzureException If the shared access signature token credential could not be created from the
 *     connection string.
 */
public EventHubClientBuilder connectionString(String connectionString) {
    final ConnectionStringProperties parsed = new ConnectionStringProperties(connectionString);
    final TokenCredential sasCredential = getTokenCredential(parsed);
    return credential(parsed.getEndpoint().getHost(), parsed.getEntityPath(), sasCredential);
}
/**
 * Builds a shared-key credential from parsed connection-string properties: a pre-built shared access
 * signature is used when present, otherwise a key-name/key pair with the default token validity.
 *
 * @param properties Parsed connection-string properties.
 * @return A {@link TokenCredential} backed by the connection string's secrets.
 */
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
    if (properties.getSharedAccessSignature() == null) {
        // No SAS in the connection string: authenticate with the shared access key pair.
        return new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
            properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
    }
    // A ready-made shared access signature was supplied; use it directly.
    return new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
}
/**
 * Sets the client options used when constructing clients, such as an application id for telemetry.
 *
 * @param clientOptions The client options. May be null to clear previously-set options.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
 * Sets the credential information given a connection string to the Event Hubs namespace and the name of a
 * specific Event Hub instance.
 *
 * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
 *     expected that the shared access key properties are contained in this connection string, but not the
 *     Event Hub name.
 * @param eventHubName The name of the Event Hub to connect the client to.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
 * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string, or
 *     if the {@code connectionString} contains a conflicting Event Hub name.
 * @throws AzureException If the shared access signature token credential could not be created from the
 *     connection string.
 */
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
connectionString = Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (connectionString.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'connectionString' cannot be an empty string."));
} else if (eventHubName.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
// Guard against a connection string that names a DIFFERENT Event Hub than the one requested.
if (!CoreUtils.isNullOrEmpty(properties.getEntityPath())
&& !eventHubName.equals(properties.getEntityPath())) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"'connectionString' contains an Event Hub name [%s] and it does not match the given "
+ "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
+ "Or supply a 'connectionString' without 'EntityPath' in it.",
properties.getEntityPath(), eventHubName)));
}
return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
 * Sets the configuration store that is used during construction of the service client.
 *
 * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use
 * {@link Configuration#NONE} to bypass using configuration settings during construction.
 *
 * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
 * Sets a custom endpoint address to use when connecting to the Event Hubs service. This is useful when the
 * network does not allow connecting to the standard Azure Event Hubs endpoint, but does allow connecting
 * through an intermediary.
 *
 * <p>If no port is specified, the default port for the configured transport is used.</p>
 *
 * @param customEndpointAddress The custom endpoint address, or null to clear a previously-set address.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
 */
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
    if (customEndpointAddress != null) {
        try {
            this.customEndpointAddress = new URL(customEndpointAddress);
        } catch (MalformedURLException e) {
            throw logger.logExceptionAsError(
                new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
        }
    } else {
        // Null resets the custom endpoint so the default service endpoint is used.
        this.customEndpointAddress = null;
    }
    return this;
}
/**
 * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
 * default, a new connection is constructed and used created for each Event Hub consumer or producer created.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder shareConnection() {
this.isSharedConnection.set(true);
return this;
}
/**
 * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
 *
 * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
 *     similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
 * @param eventHubName The name of the Event Hub to connect the client to.
 * @param credential The token credential to use for authorization. Access controls may be specified by the
 *     Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty string.
 * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, or {@code credential}
 *     is null.
 */
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
    TokenCredential credential) {
    this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
        "'fullyQualifiedNamespace' cannot be null.");
    this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
    this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");

    if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
        // Fix: the message previously said "'host'", which is not the name of any parameter of this method;
        // name the actual offending parameter so callers can act on the error.
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string."));
    } else if (CoreUtils.isNullOrEmpty(eventHubName)) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
    }
    return this;
}
/**
 * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, the
 * {@link AmqpTransportType#AMQP_WEB_SOCKETS} transport must be used.
 *
 * @param proxyOptions The proxy configuration to use.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
 * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is
 * {@link AmqpTransportType#AMQP}.
 *
 * @param transport The transport type to use.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder transportType(AmqpTransportType transport) {
this.transport = transport;
return this;
}
/**
 * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
 *
 * @param retryOptions The retry policy to use.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
 * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
 * group. The name of the consumer group that is created by default is
 * {@link #DEFAULT_CONSUMER_GROUP_NAME "$Default"}.
 *
 * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
 *     context of this group.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder consumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
return this;
}
/**
 * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
 * and queue locally without regard to whether a receive operation is currently active.
 *
 * @param prefetchCount The amount of events to queue locally. Must be between
 *     {@code MINIMUM_PREFETCH_COUNT} (1) and {@code MAXIMUM_PREFETCH_COUNT} (8000), inclusive.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code prefetchCount} is outside the allowed range.
 */
public EventHubClientBuilder prefetchCount(int prefetchCount) {
    if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
        // Fix: message said "has to be above 1" although 1 itself is accepted; "at least" matches the check.
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
            "PrefetchCount, '%s' has to be at least %s", prefetchCount, MINIMUM_PREFETCH_COUNT)));
    }

    if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
        // Fix: message said "has to be below 8000" although 8000 itself is accepted; "at most" matches the check.
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
            "PrefetchCount, '%s', has to be at most %s", prefetchCount, MAXIMUM_PREFETCH_COUNT)));
    }

    this.prefetchCount = prefetchCount;
    return this;
}
/**
 * Package-private method that sets the scheduler for the created Event Hub client.
 *
 * @param scheduler Scheduler to set.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
EventHubClientBuilder scheduler(Scheduler scheduler) {
this.scheduler = scheduler;
return this;
}
/**
 * Package-private method that sets the verify mode for this connection.
 *
 * @param verifyMode The verification mode.
 * @return The updated {@link EventHubClientBuilder} object.
 */
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
this.verifyMode = verifyMode;
return this;
}
/**
 * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
 * buildAsyncConsumerClient()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
 *
 * @return A new {@link EventHubConsumerAsyncClient} with the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, if a
 *     proxy is specified but the transport type is not web sockets, or if {@code consumerGroup} was not set.
 */
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
    if (CoreUtils.isNullOrEmpty(consumerGroup)) {
        // Fix: the message previously read "...an empty string. using EventHubClientBuilder..." — the verb was
        // missing, making the remediation hint unreadable.
        throw logger.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty "
            + "string. Set it using EventHubClientBuilder.consumerGroup(String)."));
    }

    return buildAsyncClient().createConsumer(consumerGroup, prefetchCount);
}
/**
 * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
 * buildConsumerClient()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
 *
 * @return A new {@link EventHubConsumerClient} with the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or if
 *     a proxy is specified but the transport type is not web sockets.
 */
public EventHubConsumerClient buildConsumerClient() {
return buildClient().createConsumer(consumerGroup, prefetchCount);
}
/**
 * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
 * buildAsyncProducerClient()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
 *
 * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or if
 *     a proxy is specified but the transport type is not web sockets.
 */
public EventHubProducerAsyncClient buildAsyncProducerClient() {
return buildAsyncClient().createProducer();
}
/**
 * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
 * buildProducerClient()} is invoked, a new instance of {@link EventHubProducerClient} is created.
 * (Javadoc previously referenced the async method by copy-paste.)
 *
 * @return A new {@link EventHubProducerClient} instance with all the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or if
 *     a proxy is specified but the transport type is not web sockets.
 */
public EventHubProducerClient buildProducerClient() {
return buildClient().createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubAsyncClient buildAsyncClient() {
if (retryOptions == null) {
retryOptions = DEFAULT_RETRY;
}
if (scheduler == null) {
scheduler = Schedulers.elastic();
}
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT;
}
final MessageSerializer messageSerializer = new EventHubMessageSerializer();
final EventHubConnectionProcessor processor;
if (isSharedConnection.get()) {
synchronized (connectionLock) {
if (eventHubConnectionProcessor == null) {
eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
}
}
processor = eventHubConnectionProcessor;
final int numberOfOpenClients = openClients.incrementAndGet();
logger.info("
} else {
processor = buildConnectionProcessor(messageSerializer);
}
final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
isSharedConnection.get(), this::onClientClose);
}
/**
 * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
 * invoked, a new instance of {@link EventHubClient} is created.
 *
 * <p>The following defaults are applied when options are not specified on the builder:</p>
 * <ul>
 * <li>If no configuration is specified, the global configuration is used to provide shared values such as the
 * configured proxy.</li>
 * <li>If no retry is specified, the default retry options are used.</li>
 * <li>If no proxy is specified, the builder checks the configuration for a configured proxy, then checks whether
 * a system proxy is configured.</li>
 * <li>If no timeout is specified, the client-constants default operation timeout is used.</li>
 * <li>If no scheduler is specified, an elastic scheduler is used.</li>
 * </ul>
 *
 * @return A new {@link EventHubClient} instance with all the configured options.
 * @throws IllegalArgumentException if the credentials have not been set, or if a proxy is specified but the
 *     transport type is not web sockets.
 */
EventHubClient buildClient() {
// Sync clients default to a prefetch of 1; only apply it when the user did not set one explicitly.
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
}
// The synchronous client is a blocking wrapper over the async client.
final EventHubAsyncClient client = buildAsyncClient();
return new EventHubClient(client, retryOptions);
}
void onClientClose() {
synchronized (connectionLock) {
final int numberOfOpenClients = openClients.decrementAndGet();
logger.info("Closing a dependent client.
if (numberOfOpenClients > 0) {
return;
}
if (numberOfOpenClients < 0) {
logger.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
}
logger.info("No more open clients, closing shared connection.");
if (eventHubConnectionProcessor != null) {
eventHubConnectionProcessor.dispose();
eventHubConnectionProcessor = null;
} else {
logger.warning("Shared EventHubConnectionProcessor was already disposed.");
}
}
}
/**
 * Builds a connection processor that lazily emits a single AMQP connection on demand.
 *
 * @param messageSerializer Serializer used by the connection to encode/decode AMQP messages.
 * @return A subscribed {@link EventHubConnectionProcessor} wrapping a one-connection {@code Flux}.
 */
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
final ConnectionOptions connectionOptions = getConnectionOptions();
final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE);
final ReactorProvider provider = new ReactorProvider();
final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
// Product name/version are read from the SDK's properties file; used for AMQP client identification.
final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
// The Flux creates a connection only when the processor requests one; exactly one request is honored.
final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
sink.onRequest(request -> {
if (request == 0) {
// No demand yet; nothing to emit.
return;
} else if (request > 1) {
// The processor is expected to request connections one at a time.
sink.error(logger.logExceptionAsWarning(new IllegalArgumentException(
"Requested more than one connection. Only emitting one. Request: " + request)));
return;
}
final String connectionId = StringUtil.getRandomString("MF");
logger.info("connectionId[{}]: Emitting a single connection.", connectionId);
final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
connectionOptions, eventHubName, provider, handlerProvider, tokenManagerProvider, messageSerializer,
product, clientVersion);
sink.next(connection);
});
});
// subscribeWith returns the processor itself, already wired to the connection source.
return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
connectionOptions.getFullyQualifiedNamespace(), eventHubName, connectionOptions.getRetry()));
}
/**
 * Resolves proxy options when the user has not supplied complete ones: keeps any explicitly-chosen
 * authentication type, reads the proxy address from configuration, and falls back to system defaults
 * when no address is configured.
 *
 * @param configuration Configuration store to read the HTTP proxy address from.
 * @return Proxy options derived from configuration, or {@link ProxyOptions#SYSTEM_DEFAULTS}.
 */
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
    // Preserve the authentication type from partially-configured proxy options, if any.
    final ProxyAuthenticationType authType = proxyOptions != null
        ? proxyOptions.getAuthentication()
        : ProxyAuthenticationType.NONE;

    final String address = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
    return CoreUtils.isNullOrEmpty(address)
        ? ProxyOptions.SYSTEM_DEFAULTS
        : getProxyOptions(authType, address);
}
/**
 * Parses a proxy address into {@link ProxyOptions}. A bare {@code host:port} string is handled directly;
 * anything else is delegated to azure-core's proxy-configuration parsing.
 *
 * @param authentication Authentication type to apply to the resulting options.
 * @param proxyAddress Proxy address, either {@code host:port} or a form azure-core understands.
 * @return Proxy options for the given address.
 */
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress) {
    if (!HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
        // Not a plain "host:port" string — let azure-core interpret the configured proxy.
        final com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
            .fromConfiguration(configuration);
        return new ProxyOptions(authentication,
            new Proxy(coreProxyOptions.getType().toProxyType(), coreProxyOptions.getAddress()),
            coreProxyOptions.getUsername(), coreProxyOptions.getPassword());
    }

    // "host:port" form: split once and build an HTTP proxy directly.
    final String[] parts = proxyAddress.split(":");
    final String proxyHost = parts[0];
    final int proxyPort = Integer.parseInt(parts[1]);
    final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort));
    return new ProxyOptions(authentication, proxy,
        configuration.get(ProxyOptions.PROXY_USERNAME),
        configuration.get(ProxyOptions.PROXY_PASSWORD));
}
} |
There is another `&& customEndpointAddress.getPort()` check here; that's why I didn't change it. | private ConnectionOptions getConnectionOptions() {
// Resolves all builder state into a single ConnectionOptions value, applying defaults where unset.
configuration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration;
// Fall back to the AZURE_EVENT_HUBS_CONNECTION_STRING environment variable when no credential was supplied.
if (credentials == null) {
final String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(configuration);
}
// A proxy is only usable over web sockets; reject other transports early.
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
// Shared-key credentials authorize via SAS; everything else is treated as an AAD token.
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
// Custom endpoint host/port are null when not configured; -1 means the URL carried no explicit port.
final String hostname = customEndpointAddress != null ? customEndpointAddress.getHost() : null;
final Integer port = customEndpointAddress != null && customEndpointAddress.getPort() != -1
? customEndpointAddress.getPort()
: null;
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport, retryOptions,
proxyOptions, scheduler, options, verificationMode, hostname, port);
} | : null; | private ConnectionOptions getConnectionOptions() {
// Resolves all builder state into a single ConnectionOptions value, applying defaults where unset.
configuration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration;
// Fall back to the AZURE_EVENT_HUBS_CONNECTION_STRING environment variable when no credential was supplied.
if (credentials == null) {
final String connectionString = configuration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(configuration);
}
// A proxy is only usable over web sockets; reject other transports early.
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
// Shared-key credentials authorize via SAS; everything else is treated as an AAD token.
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
// Choose the constructor overload based on whether a custom endpoint was configured.
if (customEndpointAddress == null) {
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport,
retryOptions, proxyOptions, scheduler, options, verificationMode);
} else {
return new ConnectionOptions(fullyQualifiedNamespace, credentials, authorizationType, transport,
retryOptions, proxyOptions, scheduler, options, verificationMode, customEndpointAddress.getHost(),
customEndpointAddress.getPort());
}
} | class EventHubClientBuilder {
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private final ClientLogger logger = new ClientLogger(EventHubClientBuilder.class);
private final Object connectionLock = new Object();
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
 * Creates a new instance with the default transport {@link AmqpTransportType#AMQP}. A
 * non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
 * created using the builder.
 */
public EventHubClientBuilder() {
    // All other settings are supplied later through the fluent setters; only the transport has a default here.
    transport = AmqpTransportType.AMQP;
}
/**
 * Sets the credential information given a connection string to the Event Hub instance.
 *
 * <p>
 * If the connection string is copied from the Event Hubs namespace, it will likely not contain the name of the
 * desired Event Hub. In that case, the name can be added manually by appending
 * {@literal "EntityPath=EVENT_HUB_NAME"} to the connection string, for example "EntityPath=telemetry-hub".
 * </p>
 *
 * <p>
 * If a shared access policy is defined directly on the Event Hub itself, then copying the connection string
 * from that Event Hub results in a connection string that already contains the name.
 * </p>
 *
 * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
 *     that the Event Hub name and the shared access key properties are contained in this connection string.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code connectionString} is null or empty, or does not contain the
 *     "EntityPath" key, which is the name of the Event Hub instance.
 * @throws AzureException If the shared access signature token credential could not be created using the
 *     connection string.
 */
public EventHubClientBuilder connectionString(String connectionString) {
    final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
    final TokenCredential tokenCredential = getTokenCredential(properties);
    final String host = properties.getEndpoint().getHost();
    // The EntityPath segment of the connection string names the Event Hub to connect to.
    return credential(host, properties.getEntityPath(), tokenCredential);
}
/**
 * Derives a token credential from parsed connection-string properties: a precomputed shared access
 * signature when present, otherwise a shared key name/key pair.
 *
 * @param properties Parsed connection-string properties.
 * @return The credential to authorize with.
 */
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
    if (properties.getSharedAccessSignature() != null) {
        // The connection string already carries a SharedAccessSignature; use it verbatim.
        return new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
    }
    // Otherwise sign requests with the shared access key name/key pair.
    return new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
        properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
}
/**
 * Sets the client options.
 *
 * @param clientOptions The client options.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
    // Stored as-is; a null value falls back to a fresh ClientOptions when the connection options are built.
    this.clientOptions = clientOptions;
    return this;
}
/**
 * Sets the credential information given a connection string to the Event Hubs namespace and a name to a specific
 * Event Hub instance.
 *
 * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
 *     expected that the shared access key properties are contained in this connection string, but not the Event
 *     Hub name.
 * @param eventHubName The name of the Event Hub to connect the client to.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
 * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string, or
 *     if the {@code connectionString} contains a conflicting Event Hub name.
 * @throws AzureException If the shared access signature token credential could not be created using the
 *     connection string.
 */
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
    Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
    if (connectionString.isEmpty()) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'connectionString' cannot be an empty string."));
    }
    if (eventHubName.isEmpty()) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
    }
    final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
    final TokenCredential tokenCredential = getTokenCredential(properties);
    final String entityPath = properties.getEntityPath();
    // Reject a connection string whose embedded EntityPath disagrees with the explicit eventHubName.
    if (!CoreUtils.isNullOrEmpty(entityPath) && !eventHubName.equals(entityPath)) {
        final String message = String.format(Locale.US,
            "'connectionString' contains an Event Hub name [%s] and it does not match the given "
                + "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
                + "Or supply a 'connectionString' without 'EntityPath' in it.",
            entityPath, eventHubName);
        throw logger.logExceptionAsError(new IllegalArgumentException(message));
    }
    return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
 * Sets the configuration store that is used during construction of the service client.
 *
 * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}.
 *
 * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder configuration(Configuration configuration) {
    // Read later when resolving proxy settings (see getDefaultProxyConfiguration / getProxyOptions).
    this.configuration = configuration;
    return this;
}
/**
 * Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network
 * does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through
 * an intermediary.
 * <p>
 * If no port is specified, the default port for the configured transport is used.
 *
 * @param customEndpointAddress The custom endpoint address; {@code null} clears any previously set value.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
 */
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
    if (customEndpointAddress == null) {
        // Explicitly clearing the custom endpoint is allowed.
        this.customEndpointAddress = null;
    } else {
        try {
            this.customEndpointAddress = new URL(customEndpointAddress);
        } catch (MalformedURLException e) {
            throw logger.logExceptionAsError(
                new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
        }
    }
    return this;
}
/**
 * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
 * default, a new connection is constructed and used for each Event Hub consumer or producer created.
 *
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder shareConnection() {
    // Once enabled, buildAsyncClient() lazily creates a single shared connection processor and
    // reference-counts clients built from it (see openClients / onClientClose).
    this.isSharedConnection.set(true);
    return this;
}
/**
 * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
 *
 * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
 *     similar to <strong>{@literal "{your-namespace}.servicebus.windows.net"}</strong>.
 * @param eventHubName The name of the Event Hub to connect the client to.
 * @param credential The token credential to use for authorization. Access controls may be specified by the
 *     Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
 *     string.
 * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, or {@code credential}
 *     is null.
 */
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
    TokenCredential credential) {
    // Validate all arguments before mutating any builder state, so a failed call leaves the
    // builder unchanged (previously the fields were assigned before the empty-string checks).
    Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null.");
    Objects.requireNonNull(credential, "'credential' cannot be null.");
    Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
    if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
    } else if (CoreUtils.isNullOrEmpty(eventHubName)) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
    }
    this.fullyQualifiedNamespace = fullyQualifiedNamespace;
    this.credentials = credential;
    this.eventHubName = eventHubName;
    return this;
}
/**
 * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, a
 * web-sockets based {@link AmqpTransportType} is typically required — the original javadoc is truncated
 * here; confirm against the AmqpTransportType documentation.
 *
 * @param proxyOptions The proxy configuration to use.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
    this.proxyOptions = proxyOptions;
    return this;
}
/**
 * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is
 * {@link AmqpTransportType#AMQP}.
 *
 * @param transport The transport type to use.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder transportType(AmqpTransportType transport) {
    this.transport = transport;
    return this;
}
/**
 * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
 *
 * @param retryOptions The retry policy to use.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
    // When left null, buildAsyncClient() substitutes DEFAULT_RETRY.
    this.retryOptions = retryOptions;
    return this;
}
/**
 * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
 * group. The name of the consumer group that is created by default is
 * {@link #DEFAULT_CONSUMER_GROUP_NAME "$Default"}.
 *
 * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
 *     context of this group.
 * @return The updated {@link EventHubClientBuilder} object.
 */
public EventHubClientBuilder consumerGroup(String consumerGroup) {
    // Validated only when a consumer client is built (see buildAsyncConsumerClient).
    this.consumerGroup = consumerGroup;
    return this;
}
/**
 * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
 * and queue locally without regard to whether a receive operation is currently active.
 *
 * @param prefetchCount The amount of events to queue locally; must lie within
 *     [{@code MINIMUM_PREFETCH_COUNT}, {@code MAXIMUM_PREFETCH_COUNT}].
 * @return The updated {@link EventHubClientBuilder} object.
 * @throws IllegalArgumentException if {@code prefetchCount} is outside the allowed range.
 */
public EventHubClientBuilder prefetchCount(int prefetchCount) {
    if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
        final String message = String.format(Locale.US,
            "PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT);
        throw logger.logExceptionAsError(new IllegalArgumentException(message));
    }
    if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
        final String message = String.format(Locale.US,
            "PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT);
        throw logger.logExceptionAsError(new IllegalArgumentException(message));
    }
    this.prefetchCount = prefetchCount;
    return this;
}
/**
 * Package-private method that sets the scheduler for the created Event Hub client.
 *
 * @param scheduler Scheduler to set.
 * @return The updated {@link EventHubClientBuilder} object.
 */
EventHubClientBuilder scheduler(Scheduler scheduler) {
    // When left null, buildAsyncClient() substitutes Schedulers.elastic().
    this.scheduler = scheduler;
    return this;
}
/**
 * Package-private method that sets the verify mode for this connection.
 *
 * @param verifyMode The verification mode.
 * @return The updated {@link EventHubClientBuilder} object.
 */
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
    // When left null, the connection options default to VERIFY_PEER_NAME (see getConnectionOptions).
    this.verifyMode = verifyMode;
    return this;
}
/**
 * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
 * buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
 *
 * @return A new {@link EventHubConsumerAsyncClient} with the configured options.
 * @throws IllegalArgumentException If the consumer group is unset, or if shared connection is not used and the
 *     credentials have not been set, or a proxy is specified with an incompatible transport type.
 */
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
    // A consumer group is mandatory for consumers; fail fast before building the connection.
    if (CoreUtils.isNullOrEmpty(consumerGroup)) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "'consumerGroup' cannot be null or an empty string. using EventHubClientBuilder.consumerGroup(String)"));
    }
    final EventHubAsyncClient asyncClient = buildAsyncClient();
    return asyncClient.createConsumer(consumerGroup, prefetchCount);
}
/**
 * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
 * buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
 *
 * @return A new {@link EventHubConsumerClient} with the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or a
 *     proxy is specified with an incompatible transport type.
 */
public EventHubConsumerClient buildConsumerClient() {
    final EventHubClient client = buildClient();
    return client.createConsumer(consumerGroup, prefetchCount);
}
/**
 * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
 * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
 *
 * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or a
 *     proxy is specified with an incompatible transport type.
 */
public EventHubProducerAsyncClient buildAsyncProducerClient() {
    final EventHubAsyncClient asyncClient = buildAsyncClient();
    return asyncClient.createProducer();
}
/**
 * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
 * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created.
 *
 * @return A new {@link EventHubProducerClient} instance with all the configured options.
 * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set, or a
 *     proxy is specified with an incompatible transport type.
 */
public EventHubProducerClient buildProducerClient() {
    final EventHubClient client = buildClient();
    return client.createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubAsyncClient buildAsyncClient() {
    // Fill in defaults for anything the caller did not configure.
    if (retryOptions == null) {
        retryOptions = DEFAULT_RETRY;
    }
    if (scheduler == null) {
        scheduler = Schedulers.elastic();
    }
    if (prefetchCount == null) {
        prefetchCount = DEFAULT_PREFETCH_COUNT;
    }
    final MessageSerializer messageSerializer = new EventHubMessageSerializer();
    final EventHubConnectionProcessor processor;
    if (isSharedConnection.get()) {
        // Shared mode: lazily create one connection processor under the lock and reference-count
        // the clients built from it; onClientClose() decrements and disposes at zero.
        synchronized (connectionLock) {
            if (eventHubConnectionProcessor == null) {
                eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
            }
        }
        processor = eventHubConnectionProcessor;
        final int numberOfOpenClients = openClients.incrementAndGet();
        // NOTE(review): the log statement below is truncated in this copy of the source; the original
        // presumably logs numberOfOpenClients. Restore the full statement from upstream.
        logger.info("
    } else {
        // Non-shared mode: every client gets its own dedicated connection processor.
        processor = buildConnectionProcessor(messageSerializer);
    }
    final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
    return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
        isSharedConnection.get(), this::onClientClose);
}
/**
 * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
 * invoked, a new instance of {@link EventHubClient} is created.
 *
 * <p>Defaults mirror those applied by {@code buildAsyncClient()}, except that the prefetch count for a
 * synchronous client defaults to {@code DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT} when unset.</p>
 *
 * @return A new {@link EventHubClient} instance with all the configured options.
 * @throws IllegalArgumentException if the credentials have not been set, or a proxy is specified with an
 *     incompatible transport type.
 */
EventHubClient buildClient() {
    if (prefetchCount == null) {
        // Synchronous clients default to a prefetch of one so events are pulled on demand.
        prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
    }
    final EventHubAsyncClient asyncClient = buildAsyncClient();
    return new EventHubClient(asyncClient, retryOptions);
}
// Invoked when a client built from this builder closes; in shared-connection mode, disposes the
// shared connection processor once the last open client has closed.
void onClientClose() {
    synchronized (connectionLock) {
        final int numberOfOpenClients = openClients.decrementAndGet();
        // NOTE(review): the log statement below is truncated in this copy of the source (missing the
        // closing quote and arguments). Restore the full statement from upstream.
        logger.info("Closing a dependent client.
        if (numberOfOpenClients > 0) {
            // Other clients still share the connection; keep it alive.
            return;
        }
        if (numberOfOpenClients < 0) {
            // Defensive: more closes than opens indicates a bookkeeping bug; log and continue.
            logger.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
        }
        logger.info("No more open clients, closing shared connection.");
        if (eventHubConnectionProcessor != null) {
            eventHubConnectionProcessor.dispose();
            // Null out so a later buildAsyncClient() can recreate the shared processor.
            eventHubConnectionProcessor = null;
        } else {
            logger.warning("Shared EventHubConnectionProcessor was already disposed.");
        }
    }
}
// Builds the reactive processor that lazily emits (at most one) AMQP connection on demand.
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
    final ConnectionOptions connectionOptions = getConnectionOptions();
    final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
        connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
        ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE);
    final ReactorProvider provider = new ReactorProvider();
    final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
    // Product name/version come from the packaged properties file; fall back to UNKNOWN if absent.
    final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
    final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
    final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
    // The Flux emits a connection only when the downstream requests one; requesting more than a
    // single connection at a time is rejected as an error.
    final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
        sink.onRequest(request -> {
            if (request == 0) {
                return;
            } else if (request > 1) {
                sink.error(logger.logExceptionAsWarning(new IllegalArgumentException(
                    "Requested more than one connection. Only emitting one. Request: " + request)));
                return;
            }
            final String connectionId = StringUtil.getRandomString("MF");
            logger.info("connectionId[{}]: Emitting a single connection.", connectionId);
            final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
                connectionOptions, eventHubName, provider, handlerProvider, tokenManagerProvider, messageSerializer,
                product, clientVersion);
            sink.next(connection);
        });
    });
    // The processor subscribes to the flux and manages connection lifecycle/retry.
    return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
        connectionOptions.getFullyQualifiedNamespace(), eventHubName, connectionOptions.getRetry()));
}
/**
 * Resolves proxy options from the configuration store, preserving any explicitly configured
 * authentication type; falls back to system defaults when no HTTP proxy address is configured.
 *
 * @param configuration The configuration store to read proxy settings from.
 * @return The resolved proxy options.
 */
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
    final ProxyAuthenticationType authentication = proxyOptions == null
        ? ProxyAuthenticationType.NONE
        : proxyOptions.getAuthentication();
    final String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
    return CoreUtils.isNullOrEmpty(proxyAddress)
        ? ProxyOptions.SYSTEM_DEFAULTS
        : getProxyOptions(authentication, proxyAddress);
}
/**
 * Builds proxy options from a configured proxy address: a bare "host:port" pair is parsed directly,
 * anything else is delegated to azure-core's configuration-based proxy resolution.
 *
 * @param authentication The proxy authentication type to apply.
 * @param proxyAddress The configured proxy address.
 * @return The constructed proxy options.
 */
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress) {
    if (!HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
        // Not a bare "host:port" pair; let azure-core resolve the proxy from configuration.
        final com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
            .fromConfiguration(configuration);
        final Proxy proxy = new Proxy(coreProxyOptions.getType().toProxyType(), coreProxyOptions.getAddress());
        return new ProxyOptions(authentication, proxy, coreProxyOptions.getUsername(),
            coreProxyOptions.getPassword());
    }
    final String[] hostPort = proxyAddress.split(":");
    final String host = hostPort[0];
    final int port = Integer.parseInt(hostPort[1]);
    final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
    // Credentials, if any, come from the configuration store.
    return new ProxyOptions(authentication, proxy, configuration.get(ProxyOptions.PROXY_USERNAME),
        configuration.get(ProxyOptions.PROXY_PASSWORD));
}
} | class EventHubClientBuilder {
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private final ClientLogger logger = new ClientLogger(EventHubClientBuilder.class);
private final Object connectionLock = new Object();
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
* Creates a new instance with the default transport {@link AmqpTransportType
* non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
* created using the builder.
*/
public EventHubClientBuilder() {
    // Only the transport has a default here; everything else is supplied via the fluent setters.
    transport = AmqpTransportType.AMQP;
}
/**
* Sets the credential information given a connection string to the Event Hub instance.
*
* <p>
* If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the
* desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal
* "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub".
* </p>
*
* <p>
* If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string
* from that Event Hub will result in a connection string that contains the name.
* </p>
*
* @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
* that the Event Hub name and the shared access key properties are contained in this connection string.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code
* connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString) {
    // Parse the connection string, derive a SAS/shared-key credential from it, and delegate to
    // credential(...), using the embedded EntityPath as the Event Hub name.
    ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
    TokenCredential tokenCredential = getTokenCredential(properties);
    return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential);
}
// Derives a credential from the parsed connection string: a precomputed SAS token when present,
// otherwise a shared access key name/key pair.
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
    TokenCredential tokenCredential;
    if (properties.getSharedAccessSignature() == null) {
        // No precomputed SAS token: sign requests with the shared access key name/key pair.
        tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
            properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
    } else {
        // The connection string already carries a SharedAccessSignature; use it verbatim.
        tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
    }
    return tokenCredential;
}
/**
* Sets the client options.
*
* @param clientOptions The client options.
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
    // Stored as-is; a null value falls back to a fresh ClientOptions when connection options are built.
    this.clientOptions = clientOptions;
    return this;
}
/**
* Sets the credential information given a connection string to the Event Hubs namespace and name to a specific
* Event Hub instance.
*
* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
* expected that the shared access key properties are contained in this connection string, but not the Event Hub
* name.
* @param eventHubName The name of the Event Hub to connect the client to.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or,
* if the {@code connectionString} contains the Event Hub name.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
    // Null and empty-string validation before any parsing.
    Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
    if (connectionString.isEmpty()) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "'connectionString' cannot be an empty string."));
    } else if (eventHubName.isEmpty()) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
    }
    final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
    TokenCredential tokenCredential = getTokenCredential(properties);
    // Reject a connection string whose embedded EntityPath disagrees with the explicit eventHubName.
    if (!CoreUtils.isNullOrEmpty(properties.getEntityPath())
        && !eventHubName.equals(properties.getEntityPath())) {
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
            "'connectionString' contains an Event Hub name [%s] and it does not match the given "
                + "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
                + "Or supply a 'connectionString' without 'EntityPath' in it.",
            properties.getEntityPath(), eventHubName)));
    }
    return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use
* {@link Configuration
*
* @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder configuration(Configuration configuration) {
    // Read later when resolving proxy settings.
    this.configuration = configuration;
    return this;
}
/**
* Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network
* does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through
* an intermediary. For example: {@literal https:
* <p>
* If no port is specified, the default port for the {@link
* used.
*
* @param customEndpointAddress The custom endpoint address.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
*/
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
    // Null explicitly clears any previously configured custom endpoint.
    if (customEndpointAddress == null) {
        this.customEndpointAddress = null;
        return this;
    }
    try {
        this.customEndpointAddress = new URL(customEndpointAddress);
    } catch (MalformedURLException e) {
        // Surface an invalid address as IllegalArgumentException with the parse failure as cause.
        throw logger.logExceptionAsError(
            new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
    }
    return this;
}
/**
* Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
* default, a new connection is constructed and used for each Event Hub consumer or producer created.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder shareConnection() {
    // Once enabled, a single connection processor is shared and reference-counted across clients.
    this.isSharedConnection.set(true);
    return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
TokenCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
/**
* Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link
* AmqpTransportType
*
* @param proxyOptions The proxy configuration to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link
* AmqpTransportType
*
* @param transport The transport type to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder transportType(AmqpTransportType transport) {
this.transport = transport;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
* group. The name of the consumer group that is created by default is {@link
* "$Default"}.
*
* @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
* context of this group. The name of the consumer group that is created by default is {@link
*
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder consumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
return this;
}
/**
* Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
* and queue locally without regard to whether a receive operation is currently active.
*
* @param prefetchCount The amount of events to queue locally.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code prefetchCount} is less than {@link
* greater than {@link
*/
public EventHubClientBuilder prefetchCount(int prefetchCount) {
if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT)));
}
if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT)));
}
this.prefetchCount = prefetchCount;
return this;
}
/**
* Package-private method that sets the scheduler for the created Event Hub client.
*
* @param scheduler Scheduler to set.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder scheduler(Scheduler scheduler) {
this.scheduler = scheduler;
return this;
}
/**
* Package-private method that sets the verify mode for this connection.
*
* @param verifyMode The verification mode.
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
this.verifyMode = verifyMode;
return this;
}
/**
* Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
* buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
*
* @return A new {@link EventHubConsumerAsyncClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
if (CoreUtils.isNullOrEmpty(consumerGroup)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty "
+ "string. using EventHubClientBuilder.consumerGroup(String)"));
}
return buildAsyncClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
* buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
*
* @return A new {@link EventHubConsumerClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerClient buildConsumerClient() {
return buildClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
*
* @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerAsyncClient buildAsyncProducerClient() {
return buildAsyncClient().createProducer();
}
/**
* Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created.
*
* @return A new {@link EventHubProducerClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerClient buildProducerClient() {
return buildClient().createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubAsyncClient buildAsyncClient() {
if (retryOptions == null) {
retryOptions = DEFAULT_RETRY;
}
if (scheduler == null) {
scheduler = Schedulers.elastic();
}
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT;
}
final MessageSerializer messageSerializer = new EventHubMessageSerializer();
final EventHubConnectionProcessor processor;
if (isSharedConnection.get()) {
synchronized (connectionLock) {
if (eventHubConnectionProcessor == null) {
eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
}
}
processor = eventHubConnectionProcessor;
final int numberOfOpenClients = openClients.incrementAndGet();
logger.info("
} else {
processor = buildConnectionProcessor(messageSerializer);
}
final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
isSharedConnection.get(), this::onClientClose);
}
/**
* Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
* invoked, a new instance of {@link EventHubClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubClient buildClient() {
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
}
final EventHubAsyncClient client = buildAsyncClient();
return new EventHubClient(client, retryOptions);
}
void onClientClose() {
synchronized (connectionLock) {
final int numberOfOpenClients = openClients.decrementAndGet();
logger.info("Closing a dependent client.
if (numberOfOpenClients > 0) {
return;
}
if (numberOfOpenClients < 0) {
logger.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
}
logger.info("No more open clients, closing shared connection.");
if (eventHubConnectionProcessor != null) {
eventHubConnectionProcessor.dispose();
eventHubConnectionProcessor = null;
} else {
logger.warning("Shared EventHubConnectionProcessor was already disposed.");
}
}
}
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
final ConnectionOptions connectionOptions = getConnectionOptions();
final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE);
final ReactorProvider provider = new ReactorProvider();
final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
sink.onRequest(request -> {
if (request == 0) {
return;
} else if (request > 1) {
sink.error(logger.logExceptionAsWarning(new IllegalArgumentException(
"Requested more than one connection. Only emitting one. Request: " + request)));
return;
}
final String connectionId = StringUtil.getRandomString("MF");
logger.info("connectionId[{}]: Emitting a single connection.", connectionId);
final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
connectionOptions, eventHubName, provider, handlerProvider, tokenManagerProvider, messageSerializer,
product, clientVersion);
sink.next(connection);
});
});
return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
connectionOptions.getFullyQualifiedNamespace(), eventHubName, connectionOptions.getRetry()));
}
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE;
if (proxyOptions != null) {
authentication = proxyOptions.getAuthentication();
}
String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
if (CoreUtils.isNullOrEmpty(proxyAddress)) {
return ProxyOptions.SYSTEM_DEFAULTS;
}
return getProxyOptions(authentication, proxyAddress);
}
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress) {
String host;
int port;
if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
final String[] hostPort = proxyAddress.split(":");
host = hostPort[0];
port = Integer.parseInt(hostPort[1]);
final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
final String username = configuration.get(ProxyOptions.PROXY_USERNAME);
final String password = configuration.get(ProxyOptions.PROXY_PASSWORD);
return new ProxyOptions(authentication, proxy, username, password);
} else {
com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
.fromConfiguration(configuration);
return new ProxyOptions(authentication, new Proxy(coreProxyOptions.getType().toProxyType(),
coreProxyOptions.getAddress()), coreProxyOptions.getUsername(), coreProxyOptions.getPassword());
}
}
} |
is the breaking line here intended? | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String businessCardUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/businessCards/businessCard.jpg";
SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> recognizeBusinessCardPoller =
client.beginRecognizeBusinessCardsFromUrl(businessCardUrl);
List<RecognizedForm> businessCardPageResults = recognizeBusinessCardPoller.getFinalResult();
for (int i = 0; i < businessCardPageResults.size(); i++) {
RecognizedForm recognizedForm = businessCardPageResults.get(i);
Map<String, FormField> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized business card info for page %d -----------%n", i);
FormField contactNamesFormField = recognizedFields.get("ContactNames");
if (contactNamesFormField != null) {
if (FieldValueType.LIST == contactNamesFormField.getValue().getValueType()) {
List<FormField> contactNamesList = contactNamesFormField.getValue().asList();
contactNamesList.stream()
.filter(contactName -> FieldValueType.MAP == contactName.getValue().getValueType())
.map(contactName -> {
System.out.printf("Contact name: %s%n", contactName.getValueData().getText());
return contactName.getValue().asMap();
})
.forEach(contactNamesMap -> contactNamesMap.forEach((key, contactName) -> {
if ("FirstName".equals(key)) {
if (FieldValueType.STRING == contactName.getValue().getValueType()) {
String firstName = contactName.getValue().asString();
System.out.printf("\tFirst Name: %s, confidence: %.2f%n",
firstName, contactName.getConfidence());
}
}
if ("LastName".equals(key)) {
if (FieldValueType.STRING == contactName.getValue().getValueType()) {
String lastName = contactName.getValue().asString();
System.out.printf("\tLast Name: %s, confidence: %.2f%n",
lastName, contactName.getConfidence());
}
}
}));
}
}
FormField jobTitles = recognizedFields.get("JobTitles");
if (jobTitles != null) {
if (FieldValueType.LIST == jobTitles.getValue().getValueType()) {
List<FormField> jobTitlesItems = jobTitles.getValue().asList();
jobTitlesItems.forEach(jobTitlesItem -> {
if (FieldValueType.STRING == jobTitlesItem.getValue().getValueType()) {
String jobTitle = jobTitlesItem.getValue().asString();
System.out.printf("Job Title: %s, confidence: %.2f%n",
jobTitle, jobTitlesItem.getConfidence());
}
});
}
}
FormField departments = recognizedFields.get("Departments");
if (departments != null) {
if (FieldValueType.LIST == departments.getValue().getValueType()) {
List<FormField> departmentsItems = departments.getValue().asList();
departmentsItems.forEach(departmentsItem -> {
if (FieldValueType.STRING == departmentsItem.getValue().getValueType()) {
String department = departmentsItem.getValue().asString();
System.out.printf("Department: %s, confidence: %.2f%n",
department, departmentsItem.getConfidence());
}
});
}
}
FormField emails = recognizedFields.get("Emails");
if (emails != null) {
if (FieldValueType.LIST == emails.getValue().getValueType()) {
List<FormField> emailsItems = emails.getValue().asList();
emailsItems.forEach(emailsItem -> {
if (FieldValueType.STRING == emailsItem.getValue().getValueType()) {
String email = emailsItem.getValue().asString();
System.out.printf("Email: %s, confidence: %.2f%n", email, emailsItem.getConfidence());
}
});
}
}
FormField websites = recognizedFields.get("Websites");
if (websites != null) {
if (FieldValueType.LIST == websites.getValue().getValueType()) {
List<FormField> websitesItems = websites.getValue().asList();
websitesItems.forEach(websitesItem -> {
if (FieldValueType.STRING == websitesItem.getValue().getValueType()) {
String website = websitesItem.getValue().asString();
System.out.printf("Web site: %s, confidence: %.2f%n",
website, websitesItem.getConfidence());
}
});
}
}
FormField mobilePhones = recognizedFields.get("MobilePhones");
if (mobilePhones != null) {
if (FieldValueType.LIST == mobilePhones.getValue().getValueType()) {
List<FormField> mobilePhonesItems = mobilePhones.getValue().asList();
mobilePhonesItems.forEach(mobilePhonesItem -> {
if (FieldValueType.PHONE_NUMBER == mobilePhonesItem.getValue().getValueType()) {
String mobilePhoneNumber = mobilePhonesItem.getValue().asPhoneNumber();
System.out.printf("Mobile phone number: %s, confidence: %.2f%n",
mobilePhoneNumber, mobilePhonesItem.getConfidence());
}
});
}
}
FormField otherPhones = recognizedFields.get("OtherPhones");
if (otherPhones != null) {
if (FieldValueType.LIST == otherPhones.getValue().getValueType()) {
List<FormField> otherPhonesItems = otherPhones.getValue().asList();
otherPhonesItems.forEach(otherPhonesItem -> {
if (FieldValueType.PHONE_NUMBER == otherPhonesItem.getValue().getValueType()) {
String otherPhoneNumber = otherPhonesItem.getValue().asPhoneNumber();
System.out.printf("Other phone number: %s, confidence: %.2f%n",
otherPhoneNumber, otherPhonesItem.getConfidence());
}
});
}
}
FormField faxes = recognizedFields.get("Faxes");
if (faxes != null) {
if (FieldValueType.LIST == faxes.getValue().getValueType()) {
List<FormField> faxesItems = faxes.getValue().asList();
faxesItems.forEach(faxesItem -> {
if (FieldValueType.PHONE_NUMBER == faxesItem.getValue().getValueType()) {
String faxPhoneNumber = faxesItem.getValue().asPhoneNumber();
System.out.printf("Fax phone number: %s, confidence: %.2f%n",
faxPhoneNumber, faxesItem.getConfidence());
}
});
}
}
FormField addresses = recognizedFields.get("Addresses");
if (addresses != null) {
if (FieldValueType.LIST == addresses.getValue().getValueType()) {
List<FormField> addressesItems = addresses.getValue().asList();
addressesItems.forEach(addressesItem -> {
if (FieldValueType.STRING == addressesItem.getValue().getValueType()) {
String address = addressesItem.getValue().asString();
System.out
.printf("Address: %s, confidence: %.2f%n", address, addressesItem.getConfidence());
}
});
}
}
FormField companyName = recognizedFields.get("CompanyNames");
if (companyName != null) {
if (FieldValueType.LIST == companyName.getValue().getValueType()) {
List<FormField> companyNameItems = companyName.getValue().asList();
companyNameItems.forEach(companyNameItem -> {
if (FieldValueType.STRING == companyNameItem.getValue().getValueType()) {
String companyNameValue = companyNameItem.getValue().asString();
System.out.printf("Company name: %s, confidence: %.2f%n", companyNameValue,
companyNameItem.getConfidence());
}
});
}
}
}
} | System.out | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String businessCardUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/businessCards/businessCard.jpg";
SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> recognizeBusinessCardPoller =
client.beginRecognizeBusinessCardsFromUrl(businessCardUrl);
List<RecognizedForm> businessCardPageResults = recognizeBusinessCardPoller.getFinalResult();
for (int i = 0; i < businessCardPageResults.size(); i++) {
RecognizedForm recognizedForm = businessCardPageResults.get(i);
Map<String, FormField> recognizedFields = recognizedForm.getFields();
System.out.printf("----------- Recognized business card info for page %d -----------%n", i);
FormField contactNamesFormField = recognizedFields.get("ContactNames");
if (contactNamesFormField != null) {
if (FieldValueType.LIST == contactNamesFormField.getValue().getValueType()) {
List<FormField> contactNamesList = contactNamesFormField.getValue().asList();
contactNamesList.stream()
.filter(contactName -> FieldValueType.MAP == contactName.getValue().getValueType())
.map(contactName -> {
System.out.printf("Contact name: %s%n", contactName.getValueData().getText());
return contactName.getValue().asMap();
})
.forEach(contactNamesMap -> contactNamesMap.forEach((key, contactName) -> {
if ("FirstName".equals(key)) {
if (FieldValueType.STRING == contactName.getValue().getValueType()) {
String firstName = contactName.getValue().asString();
System.out.printf("\tFirst Name: %s, confidence: %.2f%n",
firstName, contactName.getConfidence());
}
}
if ("LastName".equals(key)) {
if (FieldValueType.STRING == contactName.getValue().getValueType()) {
String lastName = contactName.getValue().asString();
System.out.printf("\tLast Name: %s, confidence: %.2f%n",
lastName, contactName.getConfidence());
}
}
}));
}
}
FormField jobTitles = recognizedFields.get("JobTitles");
if (jobTitles != null) {
if (FieldValueType.LIST == jobTitles.getValue().getValueType()) {
List<FormField> jobTitlesItems = jobTitles.getValue().asList();
jobTitlesItems.forEach(jobTitlesItem -> {
if (FieldValueType.STRING == jobTitlesItem.getValue().getValueType()) {
String jobTitle = jobTitlesItem.getValue().asString();
System.out.printf("Job Title: %s, confidence: %.2f%n",
jobTitle, jobTitlesItem.getConfidence());
}
});
}
}
FormField departments = recognizedFields.get("Departments");
if (departments != null) {
if (FieldValueType.LIST == departments.getValue().getValueType()) {
List<FormField> departmentsItems = departments.getValue().asList();
departmentsItems.forEach(departmentsItem -> {
if (FieldValueType.STRING == departmentsItem.getValue().getValueType()) {
String department = departmentsItem.getValue().asString();
System.out.printf("Department: %s, confidence: %.2f%n",
department, departmentsItem.getConfidence());
}
});
}
}
FormField emails = recognizedFields.get("Emails");
if (emails != null) {
if (FieldValueType.LIST == emails.getValue().getValueType()) {
List<FormField> emailsItems = emails.getValue().asList();
emailsItems.forEach(emailsItem -> {
if (FieldValueType.STRING == emailsItem.getValue().getValueType()) {
String email = emailsItem.getValue().asString();
System.out.printf("Email: %s, confidence: %.2f%n", email, emailsItem.getConfidence());
}
});
}
}
FormField websites = recognizedFields.get("Websites");
if (websites != null) {
if (FieldValueType.LIST == websites.getValue().getValueType()) {
List<FormField> websitesItems = websites.getValue().asList();
websitesItems.forEach(websitesItem -> {
if (FieldValueType.STRING == websitesItem.getValue().getValueType()) {
String website = websitesItem.getValue().asString();
System.out.printf("Web site: %s, confidence: %.2f%n",
website, websitesItem.getConfidence());
}
});
}
}
FormField mobilePhones = recognizedFields.get("MobilePhones");
if (mobilePhones != null) {
if (FieldValueType.LIST == mobilePhones.getValue().getValueType()) {
List<FormField> mobilePhonesItems = mobilePhones.getValue().asList();
mobilePhonesItems.forEach(mobilePhonesItem -> {
if (FieldValueType.PHONE_NUMBER == mobilePhonesItem.getValue().getValueType()) {
String mobilePhoneNumber = mobilePhonesItem.getValue().asPhoneNumber();
System.out.printf("Mobile phone number: %s, confidence: %.2f%n",
mobilePhoneNumber, mobilePhonesItem.getConfidence());
}
});
}
}
FormField otherPhones = recognizedFields.get("OtherPhones");
if (otherPhones != null) {
if (FieldValueType.LIST == otherPhones.getValue().getValueType()) {
List<FormField> otherPhonesItems = otherPhones.getValue().asList();
otherPhonesItems.forEach(otherPhonesItem -> {
if (FieldValueType.PHONE_NUMBER == otherPhonesItem.getValue().getValueType()) {
String otherPhoneNumber = otherPhonesItem.getValue().asPhoneNumber();
System.out.printf("Other phone number: %s, confidence: %.2f%n",
otherPhoneNumber, otherPhonesItem.getConfidence());
}
});
}
}
FormField faxes = recognizedFields.get("Faxes");
if (faxes != null) {
if (FieldValueType.LIST == faxes.getValue().getValueType()) {
List<FormField> faxesItems = faxes.getValue().asList();
faxesItems.forEach(faxesItem -> {
if (FieldValueType.PHONE_NUMBER == faxesItem.getValue().getValueType()) {
String faxPhoneNumber = faxesItem.getValue().asPhoneNumber();
System.out.printf("Fax phone number: %s, confidence: %.2f%n",
faxPhoneNumber, faxesItem.getConfidence());
}
});
}
}
FormField addresses = recognizedFields.get("Addresses");
if (addresses != null) {
if (FieldValueType.LIST == addresses.getValue().getValueType()) {
List<FormField> addressesItems = addresses.getValue().asList();
addressesItems.forEach(addressesItem -> {
if (FieldValueType.STRING == addressesItem.getValue().getValueType()) {
String address = addressesItem.getValue().asString();
System.out
.printf("Address: %s, confidence: %.2f%n", address, addressesItem.getConfidence());
}
});
}
}
FormField companyName = recognizedFields.get("CompanyNames");
if (companyName != null) {
if (FieldValueType.LIST == companyName.getValue().getValueType()) {
List<FormField> companyNameItems = companyName.getValue().asList();
companyNameItems.forEach(companyNameItem -> {
if (FieldValueType.STRING == companyNameItem.getValue().getValueType()) {
String companyNameValue = companyNameItem.getValue().asString();
System.out.printf("Company name: %s, confidence: %.2f%n", companyNameValue,
companyNameItem.getConfidence());
}
});
}
}
}
} | class RecognizeBusinessCardFromUrl {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | class RecognizeBusinessCardFromUrl {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} |
Nit: use one comment style (my preference is // on each newline) | private void ensureCapacity(int byteBufferRemaining) throws OutOfMemoryError {
int currentCapacity = buffer.length;
int requiredCapacity = currentCapacity + byteBufferRemaining;
/*
* This validates that adding the current capacity and ByteBuffer remaining doesn't result in an integer
* overflow response by checking that the result uses the same sign as both of the addition arguments.
*/
if (((currentCapacity ^ requiredCapacity) & (byteBufferRemaining ^ requiredCapacity)) < 0) {
throw logger.logThrowableAsError(new OutOfMemoryError());
}
if (currentCapacity >= requiredCapacity) {
return;
}
int proposedNewCapacity = currentCapacity << 1;
if ((proposedNewCapacity - requiredCapacity) < 0) {
proposedNewCapacity = requiredCapacity;
}
buffer = Arrays.copyOf(buffer, proposedNewCapacity);
} | */ | private void ensureCapacity(int byteBufferRemaining) throws OutOfMemoryError {
int currentCapacity = buffer.length;
int requiredCapacity = position + byteBufferRemaining;
/*
* This validates that adding the current capacity and ByteBuffer remaining doesn't result in an integer
* overflow response by checking that the result uses the same sign as both of the addition arguments.
*/
if (((position ^ requiredCapacity) & (byteBufferRemaining ^ requiredCapacity)) < 0) {
throw logger.logExceptionAsError(new IllegalStateException(REQUESTED_BUFFER_INVALID));
}
if (currentCapacity >= requiredCapacity) {
return;
}
int proposedNewCapacity = currentCapacity << 1;
if ((proposedNewCapacity - requiredCapacity) < 0) {
proposedNewCapacity = requiredCapacity;
}
if (proposedNewCapacity < 0) {
proposedNewCapacity = Integer.MAX_VALUE - 8;
}
buffer = Arrays.copyOf(buffer, proposedNewCapacity);
} | class ByteBufferCollector {
/*
* Start with a default size of 1 KB as this is small enough to be performant while covering most small response
* sizes.
*/
private static final int DEFAULT_INITIAL_SIZE = 1024;
private static final String INVALID_INITIAL_SIZE = "'initialSize' cannot be equal to or less than 0.";
private final ClientLogger logger = new ClientLogger(ByteBufferCollector.class);
private byte[] buffer;
private int position;
/**
* Constructs a new ByteBufferCollector instance with a default sized backing array.
*/
public ByteBufferCollector() {
this(DEFAULT_INITIAL_SIZE);
}
/**
* Constructs a new ByteBufferCollector instance with a specified initial size.
*
* @param initialSize The initial size for the backing array.
* @throws IllegalArgumentException If {@code initialSize} is equal to or less than {@code 0}.
*/
public ByteBufferCollector(int initialSize) {
if (initialSize <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(INVALID_INITIAL_SIZE));
}
this.buffer = new byte[initialSize];
this.position = 0;
}
/**
* Writes a ByteBuffers content into the backing array.
*
* @param byteBuffer The ByteBuffer to concatenate into the collector.
*/
public synchronized void write(ByteBuffer byteBuffer) {
int remaining = byteBuffer.remaining();
ensureCapacity(remaining);
byteBuffer.get(buffer, position, remaining);
position += remaining;
}
/**
* Creates a copy of the backing array resized to the number of bytes written into the collector.
*
* @return A copy of the backing array.
*/
public synchronized byte[] toByteArray() {
return Arrays.copyOf(buffer, position);
}
/*
* This method ensures that the backing buffer has sufficient space to write the data from the passed ByteBuffer.
*/
} | class ByteBufferCollector {
/*
* Start with a default size of 1 KB as this is small enough to be performant while covering most small response
* sizes.
*/
private static final int DEFAULT_INITIAL_SIZE = 1024;
private static final String INVALID_INITIAL_SIZE = "'initialSize' cannot be equal to or less than 0.";
private static final String REQUESTED_BUFFER_INVALID = "Required capacity is greater than Integer.MAX_VALUE.";
private final ClientLogger logger = new ClientLogger(ByteBufferCollector.class);
private byte[] buffer;
private int position;
/**
* Constructs a new ByteBufferCollector instance with a default sized backing array.
*/
public ByteBufferCollector() {
this(DEFAULT_INITIAL_SIZE);
}
/**
* Constructs a new ByteBufferCollector instance with a specified initial size.
*
* @param initialSize The initial size for the backing array.
* @throws IllegalArgumentException If {@code initialSize} is equal to or less than {@code 0}.
*/
public ByteBufferCollector(int initialSize) {
if (initialSize <= 0) {
throw logger.logExceptionAsError(new IllegalArgumentException(INVALID_INITIAL_SIZE));
}
this.buffer = new byte[initialSize];
this.position = 0;
}
/**
 * Writes a ByteBuffer's content into the backing array.
 *
 * @param byteBuffer The ByteBuffer to concatenate into the collector.
 * @throws IllegalStateException If the size of the backing array would be larger than
 * {@link Integer#MAX_VALUE} when the passed buffer is written.
 */
public synchronized void write(ByteBuffer byteBuffer) {
    // Nothing to do for a null buffer or one with no readable bytes.
    if (byteBuffer == null || !byteBuffer.hasRemaining()) {
        return;
    }
    int count = byteBuffer.remaining();
    ensureCapacity(count);
    byteBuffer.get(buffer, position, count);
    position += count;
}
/**
 * Creates a copy of the backing array resized to the number of bytes written into the collector.
 *
 * @return A copy of the backing array.
 */
public synchronized byte[] toByteArray() {
    byte[] copy = new byte[position];
    System.arraycopy(buffer, 0, copy, 0, position);
    return copy;
}
/*
* This method ensures that the backing buffer has sufficient space to write the data from the passed ByteBuffer.
*/
}
// Review note: add a null check for `stream` and return an empty byte array in that case?
public static Mono<byte[]> collectBytesInByteBufferStream(Flux<ByteBuffer> stream, int sizeHint) {
return stream.collect(() -> new ByteBufferCollector(sizeHint), ByteBufferCollector::write)
.map(ByteBufferCollector::toByteArray);
}
public static Mono<byte[]> collectBytesInByteBufferStream(Flux<ByteBuffer> stream, int sizeHint) {
return stream.collect(() -> new ByteBufferCollector(sizeHint), ByteBufferCollector::write)
.map(ByteBufferCollector::toByteArray);
}
class FluxUtil {
/**
 * Checks whether the given type represents a {@code Flux<ByteBuffer>}.
 *
 * @param entityType the type to check
 * @return whether the type represents a Flux that emits ByteBuffer
 */
public static boolean isFluxByteBuffer(Type entityType) {
    if (!TypeUtil.isTypeOrSubTypeOf(entityType, Flux.class)) {
        return false;
    }
    // Inspect the first generic type argument, i.e. the T in Flux<T>.
    Type innerType = TypeUtil.getTypeArguments(entityType)[0];
    return TypeUtil.isTypeOrSubTypeOf(innerType, ByteBuffer.class);
}
/**
 * Collects ByteBuffers emitted by a Flux into a byte array.
 *
 * @param stream A stream which emits ByteBuffer instances.
 * @return A Mono which emits the concatenation of all the ByteBuffer instances given by the source Flux.
 * @throws IllegalStateException If the combined size of the emitted ByteBuffers is greater than
 * {@link Integer#MAX_VALUE}.
 */
public static Mono<byte[]> collectBytesInByteBufferStream(Flux<ByteBuffer> stream) {
    // Accumulate every emitted buffer into a single growable collector, then snapshot it.
    return stream
        .collect(ByteBufferCollector::new, (collector, buffer) -> collector.write(buffer))
        .map(collector -> collector.toByteArray());
}
/**
* Collects ByteBuffers emitted by a Flux into a byte array.
* <p>
* Unlike {@link
* This size hint allows for optimizations when creating the initial buffer to reduce the number of times it needs
* to be resized while concatenating emitted ByteBuffers.
*
* @param stream A stream which emits ByteBuffer instances.
* @param sizeHint A hint about the expected stream size.
* @return A Mono which emits the concatenation of all the ByteBuffer instances given by the source Flux.
* @throws IllegalArgumentException If {@code sizeHint} is equal to or less than {@code 0}.
* @throws IllegalStateException If the combined size of the emitted ByteBuffers is greater than {@link
* Integer
*/
/**
 * Gets the content of the provided ByteBuffer as a byte array. This method will create a new byte array even if
 * the ByteBuffer optionally has a backing array.
 *
 * @param byteBuffer the byte buffer
 * @return the byte array
 */
public static byte[] byteBufferToArray(ByteBuffer byteBuffer) {
    byte[] result = new byte[byteBuffer.remaining()];
    byteBuffer.get(result);
    return result;
}
/**
 * Converts an {@link InputStream} into a {@link Flux} of {@link ByteBuffer} using a chunk size of 4096.
 * <p>
 * Given that {@link InputStream} is not guaranteed to be replayable the returned {@link Flux} should be considered
 * non-replayable as well.
 * <p>
 * If the passed {@link InputStream} is {@code null} an empty {@link Flux} is returned.
 *
 * @param inputStream The {@link InputStream} to convert into a {@link Flux}.
 * @return A {@link Flux} of {@link ByteBuffer ByteBuffers} that contains the contents of the stream.
 */
public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
    // Delegate to the chunked overload with the default 4 KB chunk size.
    return toFluxByteBuffer(inputStream, 4096);
}
/**
 * Converts an {@link InputStream} into a {@link Flux} of {@link ByteBuffer}.
 * <p>
 * Given that {@link InputStream} is not guaranteed to be replayable the returned {@link Flux} should be considered
 * non-replayable as well.
 * <p>
 * If the passed {@link InputStream} is {@code null} an empty {@link Flux} is returned.
 *
 * @param inputStream The {@link InputStream} to convert into a {@link Flux}.
 * @param chunkSize The requested size for each {@link ByteBuffer}.
 * @return A {@link Flux} of {@link ByteBuffer ByteBuffers} that contains the contents of the stream.
 * @throws IllegalArgumentException If {@code chunkSize} is less than or equal to {@code 0}.
 */
public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream, int chunkSize) {
    if (chunkSize <= 0) {
        return Flux.error(new IllegalArgumentException("'chunkSize' must be greater than 0."));
    }
    if (inputStream == null) {
        return Flux.empty();
    }
    return Flux.<ByteBuffer, InputStream>generate(() -> inputStream, (stream, sink) -> {
        byte[] buffer = new byte[chunkSize];
        try {
            int offset = 0;
            // A single read may return fewer bytes than requested; keep reading until the chunk
            // is full or the stream is exhausted.
            while (offset < chunkSize) {
                // Fix: read from the generator state `stream` rather than re-capturing the outer
                // `inputStream` variable (same object, but the state parameter is the intended source).
                int readCount = stream.read(buffer, offset, chunkSize - offset);
                if (readCount == -1) {
                    // End of stream: emit any partial chunk, then complete.
                    if (offset > 0) {
                        sink.next(ByteBuffer.wrap(buffer, 0, offset));
                    }
                    sink.complete();
                    return stream;
                }
                offset += readCount;
            }
            sink.next(ByteBuffer.wrap(buffer));
        } catch (IOException ex) {
            sink.error(ex);
        }
        return stream;
    }).filter(ByteBuffer::hasRemaining);
}
/**
 * This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
 * Context} to {@link Context Azure Context} and calls the given lambda function with this context and returns a
 * single entity of type {@code T}
 * <p>
 * If the reactor context is empty, {@link Context#NONE} is used.
 * </p>
 *
 * <p><strong>Code samples</strong></p>
 * {@codesnippet com.azure.core.implementation.util.fluxutil.withcontext}
 *
 * @param serviceCall The lambda function that makes the service call into which azure context will be passed
 * @param <T> The type of response returned from the service call
 * @return The response from service call
 */
public static <T> Mono<T> withContext(Function<Context, Mono<T>> serviceCall) {
    // Delegate with no additional attributes to merge into the context.
    return withContext(serviceCall, Collections.emptyMap());
}
/**
 * This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
 * Context} to {@link Context Azure Context}, adds the specified context attributes and calls the given lambda
 * function with this context and returns a single entity of type {@code T}
 * <p>
 * If the reactor context is empty, {@link Context#NONE} is used.
 * </p>
 *
 * @param serviceCall serviceCall The lambda function that makes the service call into which azure context will be
 * passed
 * @param contextAttributes The map of attributes sent by the calling method to be set on {@link Context}.
 * @param <T> The type of response returned from the service call
 * @return The response from service call
 */
public static <T> Mono<T> withContext(Function<Context, Mono<T>> serviceCall,
    Map<String, String> contextAttributes) {
    // NOTE(review): Mono.subscriberContext() is deprecated in newer Reactor releases in favor of
    // Mono.deferContextual - confirm the Reactor version before migrating.
    return Mono.subscriberContext()
        .map(context -> {
            // Single-element array lets the lambdas below re-assign the accumulated Context.
            final Context[] azureContext = new Context[]{Context.NONE};
            // Apply the caller-supplied attributes first...
            if (!CoreUtils.isNullOrEmpty(contextAttributes)) {
                contextAttributes.forEach((key, value) -> azureContext[0] = azureContext[0].addData(key, value));
            }
            // ...then the reactor-context entries; on a key collision the reactor entry is added
            // last (presumably taking precedence - depends on Context.addData semantics, confirm).
            if (!context.isEmpty()) {
                context.stream().forEach(entry ->
                    azureContext[0] = azureContext[0].addData(entry.getKey(), entry.getValue()));
            }
            return azureContext[0];
        })
        .flatMap(serviceCall);
}
/**
 * Converts the incoming content to Mono.
 *
 * @param <T> The type of the Response, which will be returned in the Mono.
 * @param response whose {@link Response#getValue() value} is extracted
 * @return The converted {@link Mono}; empty when the response value is {@code null}
 */
public static <T> Mono<T> toMono(Response<T> response) {
    return Mono.justOrEmpty(response.getValue());
}
/**
 * Propagates a {@link RuntimeException} through the error channel of {@link Mono}.
 * <p>
 * The exception is logged at error level when this method is called (at assembly time, not per
 * subscription) because the argument to {@code Mono.error} is evaluated eagerly.
 *
 * @param logger The {@link ClientLogger} to log the exception.
 * @param ex The {@link RuntimeException}.
 * @param <T> The return type.
 * @return A {@link Mono} that terminates with error wrapping the {@link RuntimeException}.
 */
public static <T> Mono<T> monoError(ClientLogger logger, RuntimeException ex) {
    return Mono.error(logger.logExceptionAsError(Exceptions.propagate(ex)));
}
/**
 * Propagates a {@link RuntimeException} through the error channel of {@link Flux}.
 * <p>
 * The exception is logged at error level when this method is called (at assembly time, not per
 * subscription) because the argument to {@code Flux.error} is evaluated eagerly.
 *
 * @param logger The {@link ClientLogger} to log the exception.
 * @param ex The {@link RuntimeException}.
 * @param <T> The return type.
 * @return A {@link Flux} that terminates with error wrapping the {@link RuntimeException}.
 */
public static <T> Flux<T> fluxError(ClientLogger logger, RuntimeException ex) {
    return Flux.error(logger.logExceptionAsError(Exceptions.propagate(ex)));
}
/**
 * Propagates a {@link RuntimeException} through the error channel of {@link PagedFlux}.
 * <p>
 * Unlike {@code monoError}/{@code fluxError}, the error (and its logging) is deferred until the
 * first page is requested, since the supplier passed to the PagedFlux is invoked lazily.
 *
 * @param logger The {@link ClientLogger} to log the exception.
 * @param ex The {@link RuntimeException}.
 * @param <T> The return type.
 * @return A {@link PagedFlux} that terminates with error wrapping the {@link RuntimeException}.
 */
public static <T> PagedFlux<T> pagedFluxError(ClientLogger logger, RuntimeException ex) {
    return new PagedFlux<>(() -> monoError(logger, ex));
}
/**
 * This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
 * Context} to {@link Context Azure Context} and calls the given lambda function with this context and returns a
 * collection of type {@code T}
 * <p>
 * If the reactor context is empty, {@link Context#NONE} is used.
 * </p>
 *
 * <p><strong>Code samples</strong></p>
 * {@codesnippet com.azure.core.implementation.util.fluxutil.fluxcontext}
 *
 * @param serviceCall The lambda function that makes the service call into which the context will be passed
 * @param <T> The type of response returned from the service call
 * @return The response from service call
 */
public static <T> Flux<T> fluxContext(Function<Context, Flux<T>> serviceCall) {
    // Capture the subscriber's reactor context, translate it, then invoke the service call with it.
    return Mono.subscriberContext()
        .map(FluxUtil::toAzureContext)
        .flatMapMany(serviceCall);
}
/**
 * Converts a reactor context to azure context. If the reactor context is empty,
 * {@link Context#NONE} is returned.
 *
 * @param context The reactor context
 * @return The azure context
 */
private static Context toAzureContext(reactor.util.context.Context context) {
    Context azureContext = Context.NONE;
    if (context.isEmpty()) {
        return azureContext;
    }
    // Fold every reactor-context entry into the immutable azure Context.
    for (Entry<Object, Object> entry : context.stream().collect(Collectors.toList())) {
        azureContext = azureContext.addData(entry.getKey(), entry.getValue());
    }
    return azureContext;
}
/**
 * Converts an Azure context to Reactor context. If the Azure context is {@code null} or has no
 * non-null values, an empty {@link reactor.util.context.Context} is returned.
 *
 * @param context The Azure context.
 * @return The Reactor context.
 */
public static reactor.util.context.Context toReactorContext(Context context) {
    if (context == null) {
        return reactor.util.context.Context.empty();
    }
    // Reactor contexts are null-hostile, so drop entries whose value is null.
    Map<Object, Object> contextValues = context.getValues().entrySet().stream()
        .filter(entry -> entry.getValue() != null)
        .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
    if (CoreUtils.isNullOrEmpty(contextValues)) {
        return reactor.util.context.Context.empty();
    }
    return reactor.util.context.Context.of(contextValues);
}
/**
 * Writes the bytes emitted by a Flux to an AsynchronousFileChannel.
 * <p>
 * Writing begins at position {@code 0} of the file.
 *
 * @param content the Flux content
 * @param outFile the file channel
 * @return a Mono which performs the write operation when subscribed
 */
public static Mono<Void> writeFile(Flux<ByteBuffer> content, AsynchronousFileChannel outFile) {
    return writeFile(content, outFile, 0);
}
/**
 * Writes the bytes emitted by a Flux to an AsynchronousFileChannel starting at the given position in the file.
 *
 * @param content the Flux content
 * @param outFile the file channel
 * @param position the position in the file to begin writing
 * @return a Mono which performs the write operation when subscribed
 */
public static Mono<Void> writeFile(Flux<ByteBuffer> content, AsynchronousFileChannel outFile, long position) {
    return Mono.create(emitter -> content.subscribe(new Subscriber<ByteBuffer>() {
        // True while an asynchronous write is in flight.
        volatile boolean isWriting = false;
        // True once the source Flux has signalled onComplete.
        volatile boolean isCompleted = false;
        volatile Subscription subscription;
        // Next absolute file position to write to.
        volatile long pos = position;

        @Override
        public void onSubscribe(Subscription s) {
            subscription = s;
            // Request one buffer at a time so at most one write is in flight.
            s.request(1);
        }

        @Override
        public void onNext(ByteBuffer bytes) {
            isWriting = true;
            outFile.write(bytes, pos, null, onWriteCompleted);
        }

        final CompletionHandler<Integer, Object> onWriteCompleted = new CompletionHandler<Integer, Object>() {
            @Override
            public void completed(Integer bytesWritten, Object attachment) {
                isWriting = false;
                // If the source completed while this write was in flight, finish now.
                // NOTE(review): success() is signalled before pos is advanced and before another
                // item is requested; the extra request after completion appears benign - confirm.
                if (isCompleted) {
                    emitter.success();
                }
                pos += bytesWritten;
                subscription.request(1);
            }

            @Override
            public void failed(Throwable exc, Object attachment) {
                subscription.cancel();
                emitter.error(exc);
            }
        };

        @Override
        public void onError(Throwable throwable) {
            subscription.cancel();
            emitter.error(throwable);
        }

        @Override
        public void onComplete() {
            isCompleted = true;
            // If no write is in flight we are done; otherwise the completion handler above
            // emits success when the last write lands.
            if (!isWriting) {
                emitter.success();
            }
        }
    }));
}
/**
 * Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads part of a file into chunks of the
 * given size.
 *
 * @param fileChannel The file channel.
 * @param chunkSize the size of file chunks to read.
 * @param offset The offset in the file to begin reading.
 * @param length The number of bytes to read from the file.
 * @return the Flux.
 */
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel, int chunkSize, long offset,
    long length) {
    // Reading is lazy - no I/O happens until the returned Flux is subscribed to.
    return new FileReadFlux(fileChannel, chunkSize, offset, length);
}
/**
 * Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads part of a file using the default
 * chunk size of 64 KB ({@code DEFAULT_CHUNK_SIZE}).
 *
 * @param fileChannel The file channel.
 * @param offset The offset in the file to begin reading.
 * @param length The number of bytes to read from the file.
 * @return the Flux.
 */
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel, long offset, long length) {
    return readFile(fileChannel, DEFAULT_CHUNK_SIZE, offset, length);
}
/**
 * Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads the entire file.
 * <p>
 * The file size is captured eagerly when this method is called; an {@link IOException} while
 * sizing the channel is surfaced as an error Flux wrapping it in a {@link RuntimeException}.
 *
 * @param fileChannel The file channel.
 * @return The AsyncInputStream.
 */
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel) {
    try {
        long size = fileChannel.size();
        return readFile(fileChannel, DEFAULT_CHUNK_SIZE, 0, size);
    } catch (IOException e) {
        return Flux.error(new RuntimeException("Failed to read the file.", e));
    }
}
private static final int DEFAULT_CHUNK_SIZE = 1024 * 64;
/*
 * Internal Flux implementation that asynchronously reads a byte range of an AsynchronousFileChannel
 * and emits it as ByteBuffer chunks, honoring downstream backpressure.
 */
private static final class FileReadFlux extends Flux<ByteBuffer> {
    private final AsynchronousFileChannel fileChannel;
    private final int chunkSize;
    private final long offset;
    private final long length;

    FileReadFlux(AsynchronousFileChannel fileChannel, int chunkSize, long offset, long length) {
        this.fileChannel = fileChannel;
        this.chunkSize = chunkSize;
        this.offset = offset;
        this.length = length;
    }

    @Override
    public void subscribe(CoreSubscriber<? super ByteBuffer> actual) {
        // Each subscriber gets its own subscription/state machine.
        FileReadSubscription subscription =
            new FileReadSubscription(actual, fileChannel, chunkSize, offset, length);
        actual.onSubscribe(subscription);
    }

    static final class FileReadSubscription implements Subscription, CompletionHandler<Integer, ByteBuffer> {
        // Sentinel meaning "first read has not started yet".
        private static final int NOT_SET = -1;
        private static final long serialVersionUID = -6831808726875304256L;

        private final Subscriber<? super ByteBuffer> subscriber;
        // Absolute file position of the next read; NOT_SET until the first drain.
        private volatile long position;
        private final AsynchronousFileChannel fileChannel;
        private final int chunkSize;
        private final long offset;
        private final long length;
        // Set when the requested range is exhausted or a read failed.
        private volatile boolean done;
        private Throwable error;
        // The chunk produced by the last completed read, waiting to be emitted downstream.
        private volatile ByteBuffer next;
        private volatile boolean cancelled;

        // Work-in-progress counter: serializes drain() callers (queue-drain pattern).
        volatile int wip;
        @SuppressWarnings("rawtypes")
        static final AtomicIntegerFieldUpdater<FileReadSubscription> WIP =
            AtomicIntegerFieldUpdater.newUpdater(FileReadSubscription.class, "wip");
        // Outstanding downstream demand.
        volatile long requested;
        @SuppressWarnings("rawtypes")
        static final AtomicLongFieldUpdater<FileReadSubscription> REQUESTED =
            AtomicLongFieldUpdater.newUpdater(FileReadSubscription.class, "requested");

        FileReadSubscription(Subscriber<? super ByteBuffer> subscriber, AsynchronousFileChannel fileChannel,
            int chunkSize, long offset, long length) {
            this.subscriber = subscriber;
            this.fileChannel = fileChannel;
            this.chunkSize = chunkSize;
            this.offset = offset;
            this.length = length;
            this.position = NOT_SET;
        }

        @Override
        public void request(long n) {
            if (Operators.validate(n)) {
                // Add demand with overflow capping, then try to make progress.
                Operators.addCap(REQUESTED, this, n);
                drain();
            }
        }

        @Override
        public void cancel() {
            this.cancelled = true;
        }

        @Override
        public void completed(Integer bytesRead, ByteBuffer buffer) {
            // Completion callback for an asynchronous read issued by doRead().
            if (!cancelled) {
                if (bytesRead == -1) {
                    // End of file reached before the requested range was filled.
                    done = true;
                } else {
                    // Trim the read to the requested range [offset, offset + length).
                    long pos = position;
                    int bytesWanted = Math.min(bytesRead, maxRequired(pos));
                    long position2 = pos + bytesWanted;
                    position = position2;
                    buffer.position(bytesWanted);
                    buffer.flip();
                    next = buffer;
                    if (position2 >= offset + length) {
                        done = true;
                    }
                }
                drain();
            }
        }

        @Override
        public void failed(Throwable exc, ByteBuffer attachment) {
            if (!cancelled) {
                error = exc;
                done = true;
                drain();
            }
        }

        private void drain() {
            // Only one thread drains at a time; concurrent callers just bump wip and leave.
            if (WIP.getAndIncrement(this) != 0) {
                return;
            }
            // Kick off the first read lazily on the first drain.
            if (position == NOT_SET) {
                position = offset;
                doRead();
            }
            int missed = 1;
            while (true) {
                if (cancelled) {
                    return;
                }
                if (REQUESTED.get(this) > 0) {
                    boolean emitted = false;
                    // Snapshot the terminal flag before taking the pending chunk.
                    boolean d = done;
                    ByteBuffer bb = next;
                    if (bb != null) {
                        next = null;
                        subscriber.onNext(bb);
                        emitted = true;
                    }
                    if (d) {
                        // Terminal state: error wins over completion.
                        if (error != null) {
                            subscriber.onError(error);
                        } else {
                            subscriber.onComplete();
                        }
                        return;
                    }
                    if (emitted) {
                        // One item delivered: consume one unit of demand and schedule the next read.
                        Operators.produced(REQUESTED, this, 1);
                        doRead();
                    }
                }
                missed = WIP.addAndGet(this, -missed);
                if (missed == 0) {
                    return;
                }
            }
        }

        private void doRead() {
            // Issue the next asynchronous read; completion is delivered to this CompletionHandler.
            long pos = position;
            ByteBuffer innerBuf = ByteBuffer.allocate(Math.min(chunkSize, maxRequired(pos)));
            fileChannel.read(innerBuf, pos, innerBuf, this);
        }

        private int maxRequired(long pos) {
            // Remaining bytes of the requested range starting at pos, clamped to [0, Integer.MAX_VALUE].
            long maxRequired = offset + length - pos;
            if (maxRequired <= 0) {
                return 0;
            } else {
                int m = (int) (maxRequired);
                // A negative result of the narrowing cast means more than an int's worth remains.
                if (m < 0) {
                    return Integer.MAX_VALUE;
                } else {
                    return m;
                }
            }
        }
    }
}
// Private constructor - static utility class, must not be instantiated.
private FluxUtil() {
}
}
class FluxUtil {
private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
/**
 * Checks if a type is Flux&lt;ByteBuffer&gt;.
 *
 * @param entityType the type to check
 * @return whether the type represents a Flux that emits ByteBuffer
 */
public static boolean isFluxByteBuffer(Type entityType) {
    if (TypeUtil.isTypeOrSubTypeOf(entityType, Flux.class)) {
        // Inspect the first generic type argument, i.e. the T in Flux<T>.
        final Type innerType = TypeUtil.getTypeArguments(entityType)[0];
        return TypeUtil.isTypeOrSubTypeOf(innerType, ByteBuffer.class);
    }
    return false;
}
/**
 * Collects ByteBuffers emitted by a Flux into a byte array.
 *
 * @param stream A stream which emits ByteBuffer instances.
 * @return A Mono which emits the concatenation of all the ByteBuffer instances given by the source Flux.
 * @throws IllegalStateException If the combined size of the emitted ByteBuffers is greater than
 * {@link Integer#MAX_VALUE}.
 */
public static Mono<byte[]> collectBytesInByteBufferStream(Flux<ByteBuffer> stream) {
    // Accumulate every emitted buffer into a growable collector, then snapshot it as an array.
    return stream.collect(ByteBufferCollector::new, ByteBufferCollector::write)
        .map(ByteBufferCollector::toByteArray);
}
/**
* Collects ByteBuffers emitted by a Flux into a byte array.
* <p>
* Unlike {@link
* This size hint allows for optimizations when creating the initial buffer to reduce the number of times it needs
* to be resized while concatenating emitted ByteBuffers.
*
* @param stream A stream which emits ByteBuffer instances.
* @param sizeHint A hint about the expected stream size.
* @return A Mono which emits the concatenation of all the ByteBuffer instances given by the source Flux.
* @throws IllegalArgumentException If {@code sizeHint} is equal to or less than {@code 0}.
* @throws IllegalStateException If the combined size of the emitted ByteBuffers is greater than {@link
* Integer
*/
/**
 * Collects ByteBuffers returned in a network response into a byte array.
 * <p>
 * The {@code headers} are inspected for a {@code Content-Length} which determines whether a size-hinted
 * collection or a default-sized collection is used.
 *
 * @param stream A network response ByteBuffer stream.
 * @param headers The HTTP headers of the response.
 * @return A Mono which emits the collected network response ByteBuffers.
 * @throws NullPointerException If {@code headers} is null.
 * @throws IllegalStateException If the size of the network response is greater than
 * {@link Integer#MAX_VALUE}.
 */
public static Mono<byte[]> collectBytesFromNetworkResponse(Flux<ByteBuffer> stream, HttpHeaders headers) {
    Objects.requireNonNull(headers, "'headers' cannot be null.");

    String contentLengthHeader = headers.getValue("Content-Length");
    if (contentLengthHeader == null) {
        // No length advertised: collect without a size hint.
        return FluxUtil.collectBytesInByteBufferStream(stream);
    }

    int contentLength;
    try {
        contentLength = Integer.parseInt(contentLengthHeader);
    } catch (NumberFormatException ex) {
        // Malformed Content-Length: fall back to un-hinted collection.
        return FluxUtil.collectBytesInByteBufferStream(stream);
    }

    return (contentLength > 0)
        ? FluxUtil.collectBytesInByteBufferStream(stream, contentLength)
        : Mono.just(EMPTY_BYTE_ARRAY);
}
/**
 * Gets the content of the provided ByteBuffer as a byte array. This method will create a new byte array even if
 * the ByteBuffer optionally has a backing array.
 *
 * @param byteBuffer the byte buffer
 * @return the byte array
 */
public static byte[] byteBufferToArray(ByteBuffer byteBuffer) {
    // remaining() reflects the readable bytes between position and limit.
    int length = byteBuffer.remaining();
    byte[] byteArray = new byte[length];
    byteBuffer.get(byteArray);
    return byteArray;
}
/**
* Converts an {@link InputStream} into a {@link Flux} of {@link ByteBuffer} using a chunk size of 4096.
* <p>
* Given that {@link InputStream} is not guaranteed to be replayable the returned {@link Flux} should be considered
* non-replayable as well.
* <p>
* If the passed {@link InputStream} is {@code null} {@link Flux
*
* @param inputStream The {@link InputStream} to convert into a {@link Flux}.
* @return A {@link Flux} of {@link ByteBuffer ByteBuffers} that contains the contents of the stream.
*/
public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream) {
return toFluxByteBuffer(inputStream, 4096);
}
/**
 * Converts an {@link InputStream} into a {@link Flux} of {@link ByteBuffer}.
 * <p>
 * Given that {@link InputStream} is not guaranteed to be replayable the returned {@link Flux} should be considered
 * non-replayable as well.
 * <p>
 * If the passed {@link InputStream} is {@code null} an empty {@link Flux} is returned.
 *
 * @param inputStream The {@link InputStream} to convert into a {@link Flux}.
 * @param chunkSize The requested size for each {@link ByteBuffer}.
 * @return A {@link Flux} of {@link ByteBuffer ByteBuffers} that contains the contents of the stream.
 * @throws IllegalArgumentException If {@code chunkSize} is less than or equal to {@code 0}.
 */
public static Flux<ByteBuffer> toFluxByteBuffer(InputStream inputStream, int chunkSize) {
    if (chunkSize <= 0) {
        return Flux.error(new IllegalArgumentException("'chunkSize' must be greater than 0."));
    }
    if (inputStream == null) {
        return Flux.empty();
    }
    return Flux.<ByteBuffer, InputStream>generate(() -> inputStream, (stream, sink) -> {
        byte[] buffer = new byte[chunkSize];
        try {
            int offset = 0;
            // A single read may return fewer bytes than requested; keep reading until the chunk
            // is full or the stream is exhausted.
            while (offset < chunkSize) {
                // Fix: read from the generator state `stream` rather than re-capturing the outer
                // `inputStream` variable (same object, but the state parameter is the intended source).
                int readCount = stream.read(buffer, offset, chunkSize - offset);
                if (readCount == -1) {
                    // End of stream: emit any partial chunk, then complete.
                    if (offset > 0) {
                        sink.next(ByteBuffer.wrap(buffer, 0, offset));
                    }
                    sink.complete();
                    return stream;
                }
                offset += readCount;
            }
            sink.next(ByteBuffer.wrap(buffer));
        } catch (IOException ex) {
            sink.error(ex);
        }
        return stream;
    }).filter(ByteBuffer::hasRemaining);
}
/**
* This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
* Context} to {@link Context Azure Context} and calls the given lambda function with this context and returns a
* single entity of type {@code T}
* <p>
* If the reactor context is empty, {@link Context
* </p>
*
* <p><strong>Code samples</strong></p>
* {@codesnippet com.azure.core.implementation.util.fluxutil.withcontext}
*
* @param serviceCall The lambda function that makes the service call into which azure context will be passed
* @param <T> The type of response returned from the service call
* @return The response from service call
*/
public static <T> Mono<T> withContext(Function<Context, Mono<T>> serviceCall) {
return withContext(serviceCall, Collections.emptyMap());
}
/**
* This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
* Context} to {@link Context Azure Context}, adds the specified context attributes and calls the given lambda
* function with this context and returns a single entity of type {@code T}
* <p>
* If the reactor context is empty, {@link Context
* </p>
*
* @param serviceCall serviceCall The lambda function that makes the service call into which azure context will be
* passed
* @param contextAttributes The map of attributes sent by the calling method to be set on {@link Context}.
* @param <T> The type of response returned from the service call
* @return The response from service call
*/
public static <T> Mono<T> withContext(Function<Context, Mono<T>> serviceCall,
Map<String, String> contextAttributes) {
return Mono.subscriberContext()
.map(context -> {
final Context[] azureContext = new Context[]{Context.NONE};
if (!CoreUtils.isNullOrEmpty(contextAttributes)) {
contextAttributes.forEach((key, value) -> azureContext[0] = azureContext[0].addData(key, value));
}
if (!context.isEmpty()) {
context.stream().forEach(entry ->
azureContext[0] = azureContext[0].addData(entry.getKey(), entry.getValue()));
}
return azureContext[0];
})
.flatMap(serviceCall);
}
/**
* Converts the incoming content to Mono.
*
* @param <T> The type of the Response, which will be returned in the Mono.
* @param response whose {@link Response
* @return The converted {@link Mono}
*/
public static <T> Mono<T> toMono(Response<T> response) {
return Mono.justOrEmpty(response.getValue());
}
/**
* Propagates a {@link RuntimeException} through the error channel of {@link Mono}.
*
* @param logger The {@link ClientLogger} to log the exception.
* @param ex The {@link RuntimeException}.
* @param <T> The return type.
* @return A {@link Mono} that terminates with error wrapping the {@link RuntimeException}.
*/
public static <T> Mono<T> monoError(ClientLogger logger, RuntimeException ex) {
return Mono.error(logger.logExceptionAsError(Exceptions.propagate(ex)));
}
/**
* Propagates a {@link RuntimeException} through the error channel of {@link Flux}.
*
* @param logger The {@link ClientLogger} to log the exception.
* @param ex The {@link RuntimeException}.
* @param <T> The return type.
* @return A {@link Flux} that terminates with error wrapping the {@link RuntimeException}.
*/
public static <T> Flux<T> fluxError(ClientLogger logger, RuntimeException ex) {
return Flux.error(logger.logExceptionAsError(Exceptions.propagate(ex)));
}
/**
* Propagates a {@link RuntimeException} through the error channel of {@link PagedFlux}.
*
* @param logger The {@link ClientLogger} to log the exception.
* @param ex The {@link RuntimeException}.
* @param <T> The return type.
* @return A {@link PagedFlux} that terminates with error wrapping the {@link RuntimeException}.
*/
public static <T> PagedFlux<T> pagedFluxError(ClientLogger logger, RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
/**
* This method converts the incoming {@code subscriberContext} from {@link reactor.util.context.Context Reactor
* Context} to {@link Context Azure Context} and calls the given lambda function with this context and returns a
* collection of type {@code T}
* <p>
* If the reactor context is empty, {@link Context
* </p>
*
* <p><strong>Code samples</strong></p>
* {@codesnippet com.azure.core.implementation.util.fluxutil.fluxcontext}
*
* @param serviceCall The lambda function that makes the service call into which the context will be passed
* @param <T> The type of response returned from the service call
* @return The response from service call
*/
public static <T> Flux<T> fluxContext(Function<Context, Flux<T>> serviceCall) {
return Mono.subscriberContext()
.map(FluxUtil::toAzureContext)
.flatMapMany(serviceCall);
}
/**
* Converts a reactor context to azure context. If the reactor context is {@code null} or empty, {@link
* Context
*
* @param context The reactor context
* @return The azure context
*/
private static Context toAzureContext(reactor.util.context.Context context) {
final Context[] azureContext = new Context[]{Context.NONE};
if (!context.isEmpty()) {
context.stream().forEach(entry ->
azureContext[0] = azureContext[0].addData(entry.getKey(), entry.getValue()));
}
return azureContext[0];
}
/**
* Converts an Azure context to Reactor context. If the Azure context is {@code null} or empty, {@link
* reactor.util.context.Context
*
* @param context The Azure context.
* @return The Reactor context.
*/
public static reactor.util.context.Context toReactorContext(Context context) {
if (context == null) {
return reactor.util.context.Context.empty();
}
Map<Object, Object> contextValues = context.getValues().entrySet().stream()
.filter(kvp -> kvp.getValue() != null)
.collect(Collectors.toMap(Entry::getKey, Entry::getValue));
return CoreUtils.isNullOrEmpty(contextValues)
? reactor.util.context.Context.empty()
: reactor.util.context.Context.of(contextValues);
}
/**
* Writes the bytes emitted by a Flux to an AsynchronousFileChannel.
*
* @param content the Flux content
* @param outFile the file channel
* @return a Mono which performs the write operation when subscribed
*/
public static Mono<Void> writeFile(Flux<ByteBuffer> content, AsynchronousFileChannel outFile) {
return writeFile(content, outFile, 0);
}
/**
* Writes the bytes emitted by a Flux to an AsynchronousFileChannel starting at the given position in the file.
*
* @param content the Flux content
* @param outFile the file channel
* @param position the position in the file to begin writing
* @return a Mono which performs the write operation when subscribed
*/
public static Mono<Void> writeFile(Flux<ByteBuffer> content, AsynchronousFileChannel outFile, long position) {
return Mono.create(emitter -> content.subscribe(new Subscriber<ByteBuffer>() {
volatile boolean isWriting = false;
volatile boolean isCompleted = false;
volatile Subscription subscription;
volatile long pos = position;
@Override
public void onSubscribe(Subscription s) {
subscription = s;
s.request(1);
}
@Override
public void onNext(ByteBuffer bytes) {
isWriting = true;
outFile.write(bytes, pos, null, onWriteCompleted);
}
final CompletionHandler<Integer, Object> onWriteCompleted = new CompletionHandler<Integer, Object>() {
@Override
public void completed(Integer bytesWritten, Object attachment) {
isWriting = false;
if (isCompleted) {
emitter.success();
}
pos += bytesWritten;
subscription.request(1);
}
@Override
public void failed(Throwable exc, Object attachment) {
subscription.cancel();
emitter.error(exc);
}
};
@Override
public void onError(Throwable throwable) {
subscription.cancel();
emitter.error(throwable);
}
@Override
public void onComplete() {
isCompleted = true;
if (!isWriting) {
emitter.success();
}
}
}));
}
/**
* Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads part of a file into chunks of the
* given size.
*
* @param fileChannel The file channel.
* @param chunkSize the size of file chunks to read.
* @param offset The offset in the file to begin reading.
* @param length The number of bytes to read from the file.
* @return the Flux.
*/
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel, int chunkSize, long offset,
long length) {
return new FileReadFlux(fileChannel, chunkSize, offset, length);
}
/**
* Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads part of a file.
*
* @param fileChannel The file channel.
* @param offset The offset in the file to begin reading.
* @param length The number of bytes to read from the file.
* @return the Flux.
*/
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel, long offset, long length) {
return readFile(fileChannel, DEFAULT_CHUNK_SIZE, offset, length);
}
/**
* Creates a {@link Flux} from an {@link AsynchronousFileChannel} which reads the entire file.
*
* @param fileChannel The file channel.
* @return The AsyncInputStream.
*/
public static Flux<ByteBuffer> readFile(AsynchronousFileChannel fileChannel) {
try {
long size = fileChannel.size();
return readFile(fileChannel, DEFAULT_CHUNK_SIZE, 0, size);
} catch (IOException e) {
return Flux.error(new RuntimeException("Failed to read the file.", e));
}
}
private static final int DEFAULT_CHUNK_SIZE = 1024 * 64;
private static final class FileReadFlux extends Flux<ByteBuffer> {
private final AsynchronousFileChannel fileChannel;
private final int chunkSize;
private final long offset;
private final long length;
FileReadFlux(AsynchronousFileChannel fileChannel, int chunkSize, long offset, long length) {
this.fileChannel = fileChannel;
this.chunkSize = chunkSize;
this.offset = offset;
this.length = length;
}
@Override
public void subscribe(CoreSubscriber<? super ByteBuffer> actual) {
FileReadSubscription subscription =
new FileReadSubscription(actual, fileChannel, chunkSize, offset, length);
actual.onSubscribe(subscription);
}
static final class FileReadSubscription implements Subscription, CompletionHandler<Integer, ByteBuffer> {
private static final int NOT_SET = -1;
private static final long serialVersionUID = -6831808726875304256L;
private final Subscriber<? super ByteBuffer> subscriber;
private volatile long position;
private final AsynchronousFileChannel fileChannel;
private final int chunkSize;
private final long offset;
private final long length;
private volatile boolean done;
private Throwable error;
private volatile ByteBuffer next;
private volatile boolean cancelled;
volatile int wip;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<FileReadSubscription> WIP =
AtomicIntegerFieldUpdater.newUpdater(FileReadSubscription.class, "wip");
volatile long requested;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<FileReadSubscription> REQUESTED =
AtomicLongFieldUpdater.newUpdater(FileReadSubscription.class, "requested");
FileReadSubscription(Subscriber<? super ByteBuffer> subscriber, AsynchronousFileChannel fileChannel,
int chunkSize, long offset, long length) {
this.subscriber = subscriber;
this.fileChannel = fileChannel;
this.chunkSize = chunkSize;
this.offset = offset;
this.length = length;
this.position = NOT_SET;
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
Operators.addCap(REQUESTED, this, n);
drain();
}
}
@Override
public void cancel() {
this.cancelled = true;
}
@Override
public void completed(Integer bytesRead, ByteBuffer buffer) {
if (!cancelled) {
if (bytesRead == -1) {
done = true;
} else {
long pos = position;
int bytesWanted = Math.min(bytesRead, maxRequired(pos));
long position2 = pos + bytesWanted;
position = position2;
buffer.position(bytesWanted);
buffer.flip();
next = buffer;
if (position2 >= offset + length) {
done = true;
}
}
drain();
}
}
@Override
public void failed(Throwable exc, ByteBuffer attachment) {
if (!cancelled) {
error = exc;
done = true;
drain();
}
}
private void drain() {
if (WIP.getAndIncrement(this) != 0) {
return;
}
if (position == NOT_SET) {
position = offset;
doRead();
}
int missed = 1;
while (true) {
if (cancelled) {
return;
}
if (REQUESTED.get(this) > 0) {
boolean emitted = false;
boolean d = done;
ByteBuffer bb = next;
if (bb != null) {
next = null;
subscriber.onNext(bb);
emitted = true;
}
if (d) {
if (error != null) {
subscriber.onError(error);
} else {
subscriber.onComplete();
}
return;
}
if (emitted) {
Operators.produced(REQUESTED, this, 1);
doRead();
}
}
missed = WIP.addAndGet(this, -missed);
if (missed == 0) {
return;
}
}
}
private void doRead() {
long pos = position;
ByteBuffer innerBuf = ByteBuffer.allocate(Math.min(chunkSize, maxRequired(pos)));
fileChannel.read(innerBuf, pos, innerBuf, this);
}
private int maxRequired(long pos) {
long maxRequired = offset + length - pos;
if (maxRequired <= 0) {
return 0;
} else {
int m = (int) (maxRequired);
if (m < 0) {
return Integer.MAX_VALUE;
} else {
return m;
}
}
}
}
}
private FluxUtil() {
}
} |
Nice, thanks for introducing fault tolerance. | private void validateDataCreation(int expectedSize) {
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Validating {} documents were loaded into [{}:{}]",
expectedSize, _configuration.getDatabaseId(), containerName);
final List<FeedResponse<ObjectNode>> queryItemsResponseList = container
.queryItems(COUNT_ALL_QUERY, ObjectNode.class)
.byPage()
.collectList()
.block(BULK_LOAD_WAIT_DURATION);
final int resultCount = Optional.ofNullable(queryItemsResponseList)
.map(responseList -> responseList.get(0))
.map(FeedResponse::getResults)
.map(list -> list.get(0))
.map(objectNode -> objectNode.get(COUNT_ALL_QUERY_RESULT_FIELD).intValue())
.orElse(0);
if (resultCount < (expectedSize * 0.90)) {
throw new IllegalStateException(
String.format("Number of documents %d in the container %s is less than the expected threshold %f ",
resultCount, containerName, (expectedSize * 0.90)));
}
} | if (resultCount < (expectedSize * 0.90)) { | private void validateDataCreation(int expectedSize) {
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Validating {} documents were loaded into [{}:{}]",
expectedSize, _configuration.getDatabaseId(), containerName);
final List<FeedResponse<ObjectNode>> queryItemsResponseList = container
.queryItems(COUNT_ALL_QUERY, ObjectNode.class)
.byPage()
.collectList()
.block(BULK_LOAD_WAIT_DURATION);
final int resultCount = Optional.ofNullable(queryItemsResponseList)
.map(responseList -> responseList.get(0))
.map(FeedResponse::getResults)
.map(list -> list.get(0))
.map(objectNode -> objectNode.get(COUNT_ALL_QUERY_RESULT_FIELD).intValue())
.orElse(0);
if (resultCount < (expectedSize * 0.90)) {
throw new IllegalStateException(
String.format("Number of documents %d in the container %s is less than the expected threshold %f ",
resultCount, containerName, (expectedSize * 0.90)));
}
} | class DataLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(DataLoader.class);
private static final int MAX_BATCH_SIZE = 10000;
private static final int BULK_OPERATION_CONCURRENCY = 5;
private static final Duration BULK_LOAD_WAIT_DURATION = Duration.ofSeconds(120);
private static final String COUNT_ALL_QUERY = "SELECT COUNT(1) FROM c";
private static final String COUNT_ALL_QUERY_RESULT_FIELD = "$1";
private final Configuration _configuration;
private final CosmosAsyncClient _client;
private final DataGenerator _dataGenerator;
public DataLoader(final Configuration configuration, final CosmosAsyncClient client) {
_configuration = Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
_client = Preconditions.checkNotNull(client,
"The CosmosAsyncClient needed for data loading can not be null");
_dataGenerator = new DataGenerator(_configuration.getNumberOfPreCreatedDocuments());
}
public void loadData() {
LOGGER.info("Starting batched data loading, loading {} documents in each iteration", DataGenerator.BATCH_SIZE);
while (_dataGenerator.hasNext()) {
final Map<Key, ObjectNode> newDocuments = _dataGenerator.next();
bulkCreateItems(newDocuments);
newDocuments.clear();
}
validateDataCreation(_dataGenerator.getGeneratedKeys().size());
}
private void bulkCreateItems(final Map<Key, ObjectNode> records) {
final List<CosmosItemOperation> cosmosItemOperations = mapToCosmosItemOperation(records);
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Bulk loading {} documents in [{}:{}]", cosmosItemOperations.size(),
database.getId(),
containerName);
final BulkProcessingOptions<Object> bulkProcessingOptions = new BulkProcessingOptions<>(Object.class);
bulkProcessingOptions.setMaxMicroBatchSize(MAX_BATCH_SIZE)
.setMaxMicroBatchConcurrency(BULK_OPERATION_CONCURRENCY);
container.processBulkOperations(Flux.fromIterable(cosmosItemOperations), bulkProcessingOptions)
.blockLast(BULK_LOAD_WAIT_DURATION);
LOGGER.info("Completed loading {} documents into [{}:{}]", cosmosItemOperations.size(),
database.getId(),
containerName);
}
/**
* Map the generated data to createItem requests in the underlying container
*
* @param records Data we want to load into the container
* @return List of CosmosItemOperation, each mapping to a createItem for that record
*/
private List<CosmosItemOperation> mapToCosmosItemOperation(final Map<Key, ObjectNode> records) {
return records.entrySet()
.stream()
.map(record -> {
final String partitionKey = record.getKey().getPartitioningKey();
final ObjectNode value = record.getValue();
return BulkOperations.getCreateItemOperation(value, new PartitionKey(partitionKey));
})
.collect(Collectors.toList());
}
/**
* @return Set of Keys representing each document loaded into the test collection
*/
public Set<Key> getLoadedDataKeys() {
return _dataGenerator.getGeneratedKeys();
}
} | class DataLoader {
private static final Logger LOGGER = LoggerFactory.getLogger(DataLoader.class);
private static final int MAX_BATCH_SIZE = 10000;
private static final int BULK_OPERATION_CONCURRENCY = 5;
private static final Duration BULK_LOAD_WAIT_DURATION = Duration.ofSeconds(120);
private static final String COUNT_ALL_QUERY = "SELECT COUNT(1) FROM c";
private static final String COUNT_ALL_QUERY_RESULT_FIELD = "$1";
private final Configuration _configuration;
private final CosmosAsyncClient _client;
private final DataGenerator _dataGenerator;
public DataLoader(final Configuration configuration, final CosmosAsyncClient client) {
_configuration = Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
_client = Preconditions.checkNotNull(client,
"The CosmosAsyncClient needed for data loading can not be null");
_dataGenerator = new DataGenerator(_configuration.getNumberOfPreCreatedDocuments());
}
public void loadData() {
LOGGER.info("Starting batched data loading, loading {} documents in each iteration", DataGenerator.BATCH_SIZE);
while (_dataGenerator.hasNext()) {
final Map<Key, ObjectNode> newDocuments = _dataGenerator.next();
bulkCreateItems(newDocuments);
newDocuments.clear();
}
validateDataCreation(_dataGenerator.getGeneratedKeys().size());
}
private void bulkCreateItems(final Map<Key, ObjectNode> records) {
final List<CosmosItemOperation> cosmosItemOperations = mapToCosmosItemOperation(records);
final String containerName = _configuration.getCollectionId();
final CosmosAsyncDatabase database = _client.getDatabase(_configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(containerName);
LOGGER.info("Bulk loading {} documents in [{}:{}]", cosmosItemOperations.size(),
database.getId(),
containerName);
final BulkProcessingOptions<Object> bulkProcessingOptions = new BulkProcessingOptions<>(Object.class);
bulkProcessingOptions.setMaxMicroBatchSize(MAX_BATCH_SIZE)
.setMaxMicroBatchConcurrency(BULK_OPERATION_CONCURRENCY);
container.processBulkOperations(Flux.fromIterable(cosmosItemOperations), bulkProcessingOptions)
.blockLast(BULK_LOAD_WAIT_DURATION);
LOGGER.info("Completed loading {} documents into [{}:{}]", cosmosItemOperations.size(),
database.getId(),
containerName);
}
/**
* Map the generated data to createItem requests in the underlying container
*
* @param records Data we want to load into the container
* @return List of CosmosItemOperation, each mapping to a createItem for that record
*/
private List<CosmosItemOperation> mapToCosmosItemOperation(final Map<Key, ObjectNode> records) {
return records.entrySet()
.stream()
.map(record -> {
final String partitionKey = record.getKey().getPartitioningKey();
final ObjectNode value = record.getValue();
return BulkOperations.getCreateItemOperation(value, new PartitionKey(partitionKey));
})
.collect(Collectors.toList());
}
/**
* @return Set of Keys representing each document loaded into the test collection
*/
public Set<Key> getLoadedDataKeys() {
return _dataGenerator.getGeneratedKeys();
}
} |
What is the motivation here? We should not stop the run on a fixed error count; instead, let the run complete and analyze the results later | public void run(final Set<Key> testKeys) {
final ArrayList<Key> keys = new ArrayList<>(testKeys);
Collections.shuffle(keys);
final long runStartTime = System.currentTimeMillis();
long i = 0;
for (; BenchmarkHelper.shouldContinue(runStartTime, i, _configuration); i++) {
if (exceedsErrorThreshold()) {
return;
}
int index = (int) ((i % keys.size()) % Integer.MAX_VALUE);
final Key key = keys.get(index);
try {
_semaphore.acquire();
} catch (InterruptedException e) {
_errorCount.incrementAndGet();
continue;
}
_executorService.submit(() -> runOperation(key));
}
final Instant runEndTime = Instant.now();
LOGGER.info("Number of iterations: {}, Errors: {}, Runtime: {} millis",
_successCount.get(),
_errorCount.get(),
runEndTime.minusMillis(runStartTime).toEpochMilli());
} | if (exceedsErrorThreshold()) { | public void run(final Set<Key> testKeys) {
final ArrayList<Key> keys = new ArrayList<>(testKeys);
Collections.shuffle(keys);
final long runStartTime = System.currentTimeMillis();
long i = 0;
for (; BenchmarkHelper.shouldContinue(runStartTime, i, _configuration); i++) {
int index = (int) ((i % keys.size()) % Integer.MAX_VALUE);
final Key key = keys.get(index);
try {
_semaphore.acquire();
} catch (InterruptedException e) {
_errorCount.incrementAndGet();
continue;
}
_executorService.submit(() -> runOperation(key));
}
final Instant runEndTime = Instant.now();
LOGGER.info("Number of iterations: {}, Errors: {}, Runtime: {} millis",
_successCount.get(),
_errorCount.get(),
runEndTime.minusMillis(runStartTime).toEpochMilli());
} | class GetTestRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(GetTestRunner.class);
private static final Duration TERMINATION_WAIT_DURATION = Duration.ofSeconds(60);
private static final int ERROR_COUNT_TOLERANCE = 2333;
private final Configuration _configuration;
private final Accessor<Key, JsonNode> _accessor;
private final ExecutorService _executorService;
private final AtomicLong _successCount;
private final AtomicLong _errorCount;
private final Semaphore _semaphore;
GetTestRunner(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
Preconditions.checkNotNull(client,
"Need a non-null client for setting up the Database and containers for the test");
Preconditions.checkNotNull(metricsRegistry,
"The MetricsRegistry can not be null");
_configuration = configuration;
_accessor = createAccessor(configuration, client, metricsRegistry);
_executorService = Executors.newFixedThreadPool(configuration.getConcurrency());
_successCount = new AtomicLong(0);
_errorCount = new AtomicLong(0);
_semaphore = new Semaphore(configuration.getConcurrency());
}
public void cleanup() {
try {
_executorService.awaitTermination(TERMINATION_WAIT_DURATION.getSeconds(), TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOGGER.error("Error awaiting the completion of all tasks", e);
}
_executorService.shutdown();
}
private boolean exceedsErrorThreshold() {
if (_errorCount.get() > ERROR_COUNT_TOLERANCE) {
LOGGER.error("Received {} errors; terminating the run", ERROR_COUNT_TOLERANCE);
return true;
}
return false;
}
private void runOperation(final Key key) {
try {
_accessor.get(key, GetRequestOptions.EMPTY_REQUEST_OPTIONS);
_successCount.getAndIncrement();
} catch (AccessorException e) {
LOGGER.error("Received running exception", e);
_errorCount.getAndIncrement();
} finally {
_semaphore.release();
}
}
private Accessor<Key, JsonNode> createAccessor(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
final StaticDataLocator dataLocator = createDataLocator(configuration, client);
final KeyExtractor<Key> keyExtractor = new KeyExtractorImpl();
final DocumentTransformer<JsonNode, JsonNode> documentTransformer = new IdentityDocumentTransformer<>();
final Clock clock = Clock.systemUTC();
return new CosmosDBDataAccessor<>(dataLocator,
keyExtractor,
new ResponseHandler<>(documentTransformer, keyExtractor),
new MetricsFactory(metricsRegistry, clock),
clock,
new OperationsLogger(Duration.ofSeconds(10)));
}
private StaticDataLocator createDataLocator(Configuration configuration, CosmosAsyncClient client) {
final CollectionKey collectionKey = new CollectionKey(configuration.getServiceEndpoint(),
configuration.getDatabaseId(),
configuration.getCollectionId());
final CosmosAsyncDatabase database = client.getDatabase(configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(configuration.getCollectionId());
return new StaticDataLocator(collectionKey, container);
}
} | class GetTestRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(GetTestRunner.class);
private static final Duration TERMINATION_WAIT_DURATION = Duration.ofSeconds(60);
private final Configuration _configuration;
private final Accessor<Key, JsonNode> _accessor;
private final ExecutorService _executorService;
private final AtomicLong _successCount;
private final AtomicLong _errorCount;
private final Semaphore _semaphore;
GetTestRunner(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
Preconditions.checkNotNull(client,
"Need a non-null client for setting up the Database and containers for the test");
Preconditions.checkNotNull(metricsRegistry,
"The MetricsRegistry can not be null");
_configuration = configuration;
_accessor = createAccessor(configuration, client, metricsRegistry);
_executorService = Executors.newFixedThreadPool(configuration.getConcurrency());
_successCount = new AtomicLong(0);
_errorCount = new AtomicLong(0);
_semaphore = new Semaphore(configuration.getConcurrency());
}
public void cleanup() {
try {
_executorService.awaitTermination(TERMINATION_WAIT_DURATION.getSeconds(), TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOGGER.error("Error awaiting the completion of all tasks", e);
}
_executorService.shutdown();
}
private void runOperation(final Key key) {
try {
_accessor.get(key, GetRequestOptions.EMPTY_REQUEST_OPTIONS);
_successCount.getAndIncrement();
} catch (AccessorException e) {
LOGGER.error("Received running exception", e);
_errorCount.getAndIncrement();
} finally {
_semaphore.release();
}
}
private Accessor<Key, JsonNode> createAccessor(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
final StaticDataLocator dataLocator = createDataLocator(configuration, client);
final KeyExtractor<Key> keyExtractor = new KeyExtractorImpl();
final DocumentTransformer<JsonNode, JsonNode> documentTransformer = new IdentityDocumentTransformer<>();
final Clock clock = Clock.systemUTC();
return new CosmosDBDataAccessor<>(dataLocator,
keyExtractor,
new ResponseHandler<>(documentTransformer, keyExtractor),
new MetricsFactory(metricsRegistry, clock),
clock,
new OperationsLogger(Duration.ofSeconds(10)));
}
private StaticDataLocator createDataLocator(Configuration configuration, CosmosAsyncClient client) {
final CollectionKey collectionKey = new CollectionKey(configuration.getServiceEndpoint(),
configuration.getDatabaseId(),
configuration.getCollectionId());
final CosmosAsyncDatabase database = client.getDatabase(configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(configuration.getCollectionId());
return new StaticDataLocator(collectionKey, container);
}
} |
After a few runs, I realize it's not adding much value. The error stats are counted and reported separately, so removing it. | public void run(final Set<Key> testKeys) {
final ArrayList<Key> keys = new ArrayList<>(testKeys);
Collections.shuffle(keys);
final long runStartTime = System.currentTimeMillis();
long i = 0;
for (; BenchmarkHelper.shouldContinue(runStartTime, i, _configuration); i++) {
if (exceedsErrorThreshold()) {
return;
}
int index = (int) ((i % keys.size()) % Integer.MAX_VALUE);
final Key key = keys.get(index);
try {
_semaphore.acquire();
} catch (InterruptedException e) {
_errorCount.incrementAndGet();
continue;
}
_executorService.submit(() -> runOperation(key));
}
final Instant runEndTime = Instant.now();
LOGGER.info("Number of iterations: {}, Errors: {}, Runtime: {} millis",
_successCount.get(),
_errorCount.get(),
runEndTime.minusMillis(runStartTime).toEpochMilli());
} | if (exceedsErrorThreshold()) { | public void run(final Set<Key> testKeys) {
final ArrayList<Key> keys = new ArrayList<>(testKeys);
Collections.shuffle(keys);
final long runStartTime = System.currentTimeMillis();
long i = 0;
for (; BenchmarkHelper.shouldContinue(runStartTime, i, _configuration); i++) {
int index = (int) ((i % keys.size()) % Integer.MAX_VALUE);
final Key key = keys.get(index);
try {
_semaphore.acquire();
} catch (InterruptedException e) {
_errorCount.incrementAndGet();
continue;
}
_executorService.submit(() -> runOperation(key));
}
final Instant runEndTime = Instant.now();
LOGGER.info("Number of iterations: {}, Errors: {}, Runtime: {} millis",
_successCount.get(),
_errorCount.get(),
runEndTime.minusMillis(runStartTime).toEpochMilli());
} | class GetTestRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(GetTestRunner.class);
private static final Duration TERMINATION_WAIT_DURATION = Duration.ofSeconds(60);
private static final int ERROR_COUNT_TOLERANCE = 2333;
private final Configuration _configuration;
private final Accessor<Key, JsonNode> _accessor;
private final ExecutorService _executorService;
private final AtomicLong _successCount;
private final AtomicLong _errorCount;
private final Semaphore _semaphore;
GetTestRunner(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
Preconditions.checkNotNull(client,
"Need a non-null client for setting up the Database and containers for the test");
Preconditions.checkNotNull(metricsRegistry,
"The MetricsRegistry can not be null");
_configuration = configuration;
_accessor = createAccessor(configuration, client, metricsRegistry);
_executorService = Executors.newFixedThreadPool(configuration.getConcurrency());
_successCount = new AtomicLong(0);
_errorCount = new AtomicLong(0);
_semaphore = new Semaphore(configuration.getConcurrency());
}
public void cleanup() {
try {
_executorService.awaitTermination(TERMINATION_WAIT_DURATION.getSeconds(), TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOGGER.error("Error awaiting the completion of all tasks", e);
}
_executorService.shutdown();
}
private boolean exceedsErrorThreshold() {
if (_errorCount.get() > ERROR_COUNT_TOLERANCE) {
LOGGER.error("Received {} errors; terminating the run", ERROR_COUNT_TOLERANCE);
return true;
}
return false;
}
private void runOperation(final Key key) {
try {
_accessor.get(key, GetRequestOptions.EMPTY_REQUEST_OPTIONS);
_successCount.getAndIncrement();
} catch (AccessorException e) {
LOGGER.error("Received running exception", e);
_errorCount.getAndIncrement();
} finally {
_semaphore.release();
}
}
private Accessor<Key, JsonNode> createAccessor(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
final StaticDataLocator dataLocator = createDataLocator(configuration, client);
final KeyExtractor<Key> keyExtractor = new KeyExtractorImpl();
final DocumentTransformer<JsonNode, JsonNode> documentTransformer = new IdentityDocumentTransformer<>();
final Clock clock = Clock.systemUTC();
return new CosmosDBDataAccessor<>(dataLocator,
keyExtractor,
new ResponseHandler<>(documentTransformer, keyExtractor),
new MetricsFactory(metricsRegistry, clock),
clock,
new OperationsLogger(Duration.ofSeconds(10)));
}
private StaticDataLocator createDataLocator(Configuration configuration, CosmosAsyncClient client) {
final CollectionKey collectionKey = new CollectionKey(configuration.getServiceEndpoint(),
configuration.getDatabaseId(),
configuration.getCollectionId());
final CosmosAsyncDatabase database = client.getDatabase(configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(configuration.getCollectionId());
return new StaticDataLocator(collectionKey, container);
}
} | class GetTestRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(GetTestRunner.class);
private static final Duration TERMINATION_WAIT_DURATION = Duration.ofSeconds(60);
private final Configuration _configuration;
private final Accessor<Key, JsonNode> _accessor;
private final ExecutorService _executorService;
private final AtomicLong _successCount;
private final AtomicLong _errorCount;
private final Semaphore _semaphore;
GetTestRunner(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
Preconditions.checkNotNull(configuration,
"The Workload configuration defining the parameters can not be null");
Preconditions.checkNotNull(client,
"Need a non-null client for setting up the Database and containers for the test");
Preconditions.checkNotNull(metricsRegistry,
"The MetricsRegistry can not be null");
_configuration = configuration;
_accessor = createAccessor(configuration, client, metricsRegistry);
_executorService = Executors.newFixedThreadPool(configuration.getConcurrency());
_successCount = new AtomicLong(0);
_errorCount = new AtomicLong(0);
_semaphore = new Semaphore(configuration.getConcurrency());
}
public void cleanup() {
try {
_executorService.awaitTermination(TERMINATION_WAIT_DURATION.getSeconds(), TimeUnit.SECONDS);
} catch (InterruptedException e) {
LOGGER.error("Error awaiting the completion of all tasks", e);
}
_executorService.shutdown();
}
private void runOperation(final Key key) {
try {
_accessor.get(key, GetRequestOptions.EMPTY_REQUEST_OPTIONS);
_successCount.getAndIncrement();
} catch (AccessorException e) {
LOGGER.error("Received running exception", e);
_errorCount.getAndIncrement();
} finally {
_semaphore.release();
}
}
private Accessor<Key, JsonNode> createAccessor(final Configuration configuration,
final CosmosAsyncClient client,
final MetricRegistry metricsRegistry) {
final StaticDataLocator dataLocator = createDataLocator(configuration, client);
final KeyExtractor<Key> keyExtractor = new KeyExtractorImpl();
final DocumentTransformer<JsonNode, JsonNode> documentTransformer = new IdentityDocumentTransformer<>();
final Clock clock = Clock.systemUTC();
return new CosmosDBDataAccessor<>(dataLocator,
keyExtractor,
new ResponseHandler<>(documentTransformer, keyExtractor),
new MetricsFactory(metricsRegistry, clock),
clock,
new OperationsLogger(Duration.ofSeconds(10)));
}
private StaticDataLocator createDataLocator(Configuration configuration, CosmosAsyncClient client) {
final CollectionKey collectionKey = new CollectionKey(configuration.getServiceEndpoint(),
configuration.getDatabaseId(),
configuration.getCollectionId());
final CosmosAsyncDatabase database = client.getDatabase(configuration.getDatabaseId());
final CosmosAsyncContainer container = database.getContainer(configuration.getCollectionId());
return new StaticDataLocator(collectionKey, container);
}
} |
This will make code easier to understand: ``` private String getWebDriverCachePath() { String springPath = this.getClass().getResource(("")).getPath(); String springPathSuffix = File.separator + "sdk" + File.separator + "spring"; while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) { springPath = new File(springPath).getParent(); } return springPath + File.separator + WEB_DRIVER_FOLDER_NAME; } ``` | private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
} | } | private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(destination);
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(String path) {
File file = new File(path);
if (!file.exists()) {
logger.warn("Path " + path + " does not exist!");
return;
}
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + path);
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
setPathExecutableRecursively(file.getAbsolutePath());
}
}
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
}
} |
Rename `destination` to `webDriverCachePath`. | protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(destination);
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
} | String destination = getWebDriverCachePath(); | protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(String path) {
File file = new File(path);
if (!file.exists()) {
logger.warn("Path " + path + " does not exist!");
return;
}
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + path);
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
setPathExecutableRecursively(file.getAbsolutePath());
}
}
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
}
} |
There should be a space after `}`. | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | }else { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
Can we delete this check? Refs: https://stackoverflow.com/questions/24845281/how-does-a-for-each-loop-guard-against-an-empty-list | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | if (null != files && files.length > 0) { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
for (File f: file.listFiles()) | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | for (File f : files) { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
Can we delete this check? | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | if (!file.exists()) { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
I am very happy to be working with so many geniuses | private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
} | } | private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(destination);
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(String path) {
File file = new File(path);
if (!file.exists()) {
logger.warn("Path " + path + " does not exist!");
return;
}
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + path);
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
setPathExecutableRecursively(file.getAbsolutePath());
}
}
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
}
} |
u r right | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | for (File f : files) { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
in this project, yes , we can delete it, but I recommend to reserve it. | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
File[] files = file.listFiles();
if (null != files && files.length > 0) {
for (File f : files) {
setPathExecutableRecursively(f);
}
}
}else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | if (!file.exists()) { | private void setPathExecutableRecursively(File file) {
if (!file.exists()) {
logger.warn("Path " + file + " does not exist!");
return;
}
if (file.isDirectory()) {
for (File f : file.listFiles()) {
setPathExecutableRecursively(f);
}
} else {
if (!file.setExecutable(true)) {
logger.error("Failed to set executable for " + file);
}
}
} | class SeleniumITHelper {
Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
public static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String destination = getWebDriverCachePath();
setPathExecutableRecursively(new File(destination));
System.setProperty("wdm.cachePath", destination);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String currentPath = this.getClass().getResource(("")).getPath();
String sdkSpring = File.separator + "sdk" + File.separator + "spring";
String destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
while (StringUtils.isNotEmpty(currentPath)) {
if (StringUtils.endsWith(currentPath, sdkSpring)) {
destination = currentPath + File.separator + WEB_DRIVER_FOLDER_NAME;
break;
} else {
currentPath = new File(currentPath).getParent();
}
}
return destination;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} | class SeleniumITHelper {
private static Logger logger = LoggerFactory.getLogger(SeleniumITHelper.class);
private static String WEB_DRIVER_FOLDER_NAME = "webdriver";
protected AppRunner app;
protected WebDriver driver;
protected WebDriverWait wait;
public SeleniumITHelper(Class<?> appClass, Map<String, String> properties) {
createDriver();
createAppRunner(appClass, properties);
}
protected void createDriver() {
if (driver == null) {
String webDriverCachePath = getWebDriverCachePath();
setPathExecutableRecursively(new File(webDriverCachePath));
System.setProperty("wdm.cachePath", webDriverCachePath);
WebDriverManager.chromedriver().setup();
ChromeOptions options = new ChromeOptions();
options.addArguments("--headless");
options.addArguments("--incognito", "--no-sandbox", "--disable-dev-shm-usage");
driver = new ChromeDriver(options);
wait = new WebDriverWait(driver, 10);
}
}
private String getWebDriverCachePath() {
String springPath = this.getClass().getResource(("")).getPath();
String springPathSuffix = File.separator + "sdk" + File.separator + "spring";
while (StringUtils.isNotEmpty(springPath) && !springPath.endsWith(springPathSuffix)) {
springPath = new File(springPath).getParent();
}
return springPath + File.separator + WEB_DRIVER_FOLDER_NAME;
}
protected void createAppRunner(Class<?> appClass, Map<String, String> properties) {
app = new AppRunner(appClass);
properties.forEach(app::property);
app.start();
}
/**
* Manually invoke destroy to complete resource release.
*/
public void destroy() {
driver.quit();
app.close();
}
} |
Should we make this `getNonNullStringIndexType` return StringIndexTypeResponse.fromString( directly from the method, that way we can reduce the duplicate lines. | private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setStringIndexType(StringIndexTypeResponse.fromString(
getNonNullStringIndexType(action.getStringIndexType()).toString())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString()))
.setStringIndexType(StringIndexTypeResponse.fromString(
getNonNullStringIndexType(action.getStringIndexType()).toString()))
);
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
);
return keyPhrasesTask;
}).collect(Collectors.toList()));
} | getNonNullStringIndexType(action.getStringIndexType()).toString()))); | private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setStringIndexType(getNonNullStringIndexTypeResponse(action.getStringIndexType())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString()))
.setStringIndexType(getNonNullStringIndexTypeResponse(action.getStringIndexType()))
);
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
);
return keyPhrasesTask;
}).collect(Collectors.toList()));
} | class AnalyzeBatchActionsAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = null;
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = null;
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = null;
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
recognizeEntitiesActionResults = IterableStream.of(entityRecognitionTasksItems.stream()
.map(taskItem -> {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
recognizePiiEntitiesActionResults = IterableStream.of(piiTasksItems.stream()
.map(taskItem -> {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
extractKeyPhrasesActionResults = IterableStream.of(keyPhraseExtractionTasks.stream()
.map(taskItem -> {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
return actionResult;
})
.collect(Collectors.toList()));
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
recognizeEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
extractKeyPhrasesActionResults);
return analyzeBatchActionsResult;
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
private StringIndexType getNonNullStringIndexType(StringIndexType stringIndexType) {
return stringIndexType == null ? StringIndexType.UTF16CODE_UNIT : stringIndexType;
}
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET =
"
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context)
.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput, finalContext)
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context)
.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput, finalContext)
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizeEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
for (int i = 0; i < piiTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizePiiEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
extractKeyPhrasesActionResults.add(actionResult);
}
}
final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
if (!CoreUtils.isNullOrEmpty(errors)) {
for (TextAnalyticsError error : errors) {
final String[] targetPair = parseActionErrorTarget(error.getTarget());
final String taskName = targetPair[0];
final Integer taskIndex = Integer.valueOf(targetPair[1]);
final TextAnalyticsActionResult actionResult;
if ("entityRecognitionTasks".equals(taskName)) {
actionResult = recognizeEntitiesActionResults.get(taskIndex);
} else if ("entityRecognitionPiiTasks".equals(taskName)) {
actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
} else if ("keyPhraseExtractionTasks".equals(taskName)) {
actionResult = extractKeyPhrasesActionResults.get(taskIndex);
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Invalid task name in target reference, " + taskName));
}
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
new com.azure.ai.textanalytics.models.TextAnalyticsError(
TextAnalyticsErrorCode.fromString(
error.getCode() == null ? null : error.getCode().toString()),
error.getMessage(), null));
}
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizeEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizePiiEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
IterableStream.of(extractKeyPhrasesActionResults));
return analyzeBatchActionsResult;
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
    // Normalize a missing context to the shared immutable NONE instance so
    // downstream code never has to null-check.
    if (context != null) {
        return context;
    }
    return Context.NONE;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    // Callers may pass null; substitute a default options instance.
    if (options != null) {
        return options;
    }
    return new AnalyzeBatchActionsOptions();
}
private String getNotNullModelVersion(String modelVersion) {
    // A null model version means "use the service's latest model".
    if (modelVersion != null) {
        return modelVersion;
    }
    return "latest";
}
private String[] parseActionErrorTarget(String targetReference) {
    // The service error's "target" field must reference the failing action;
    // anything else is a malformed error payload.
    if (CoreUtils.isNullOrEmpty(targetReference)) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error with a target field referencing an action but did not get one"));
    }
    final Matcher referenceMatcher =
        Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE).matcher(targetReference);
    final String[] nameIdPair = new String[2];
    // Keep scanning so the returned pair reflects the LAST match in the
    // reference string (same semantics as the original loop). Entries stay
    // null when nothing matches.
    while (referenceMatcher.find()) {
        nameIdPair[0] = referenceMatcher.group(1);
        nameIdPair[1] = referenceMatcher.group(2);
    }
    return nameIdPair;
}
private StringIndexTypeResponse getNonNullStringIndexTypeResponse(StringIndexType stringIndexType) {
    // Default to UTF-16 code units, the indexing scheme native to Java strings.
    final StringIndexType effectiveType =
        stringIndexType == null ? StringIndexType.UTF16CODE_UNIT : stringIndexType;
    return StringIndexTypeResponse.fromString(effectiveType.toString());
}
} |
No biggie, but isn't it better to just use `rawId` instead of `getRawId()`? | public boolean equals(Object that) {
// Same reference: trivially equal.
if (this == that) {
return true;
}
// A null or differently-typed argument can never be equal.
if (!(that instanceof MicrosoftTeamsUserIdentifier)) {
return false;
}
MicrosoftTeamsUserIdentifier thatId = (MicrosoftTeamsUserIdentifier) that;
// The user id and the anonymous flag must both match.
if (!thatId.getUserId().equals(this.getUserId())
|| thatId.isAnonymous != this.isAnonymous) {
return false;
}
// Cloud environments must agree; the two symmetric checks together also
// treat "both null" as a match.
if (cloudEnvironment != null && !cloudEnvironment.equals(thatId.cloudEnvironment)) {
return false;
}
if (thatId.cloudEnvironment != null && !thatId.cloudEnvironment.equals(this.cloudEnvironment)) {
return false;
}
// rawId is optional: if either side has no rawId the identifiers are still
// considered equal; only two non-null rawIds are actually compared.
return getRawId() == null
|| thatId.getRawId() == null
|| thatId.getRawId().equals(this.getRawId());
} | return getRawId() == null | public boolean equals(Object that) {
if (this == that) {
return true;
}
if (!(that instanceof MicrosoftTeamsUserIdentifier)) {
return false;
}
MicrosoftTeamsUserIdentifier thatId = (MicrosoftTeamsUserIdentifier) that;
if (!thatId.getUserId().equals(this.getUserId())
|| thatId.isAnonymous != this.isAnonymous) {
return false;
}
if (cloudEnvironment != null && !cloudEnvironment.equals(thatId.cloudEnvironment)) {
return false;
}
if (thatId.cloudEnvironment != null && !thatId.cloudEnvironment.equals(this.cloudEnvironment)) {
return false;
}
return getRawId() == null
|| thatId.getRawId() == null
|| thatId.getRawId().equals(this.getRawId());
} | class MicrosoftTeamsUserIdentifier extends CommunicationIdentifier {
private final String userId;
private final boolean isAnonymous;
private CommunicationCloudEnvironment cloudEnvironment = CommunicationCloudEnvironment.PUBLIC;
private String rawId;
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @param cloudEnvironment the cloud environment in which this identifier is created
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous, CommunicationCloudEnvironment cloudEnvironment) {
this(userId, isAnonymous);
this.cloudEnvironment = cloudEnvironment;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous) {
if (CoreUtils.isNullOrEmpty(userId)) {
throw new IllegalArgumentException("The initialization parameter [userId] cannot be null or empty.");
}
this.userId = userId;
this.isAnonymous = isAnonymous;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId) {
this(userId, false);
}
/**
* Get Teams User Id
* @return userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
*/
public String getUserId() {
return this.userId;
}
/**
* @return True if the user is anonymous, for example when joining a meeting with a share link.
*/
public boolean isAnonymous() {
return this.isAnonymous;
}
/**
* Set cloud environment of the Teams user identifier
* @param cloudEnvironment the cloud environment in which this identifier is created
* @return this object
*/
public MicrosoftTeamsUserIdentifier setCloudEnvironment(CommunicationCloudEnvironment cloudEnvironment) {
this.cloudEnvironment = cloudEnvironment;
return this;
}
/**
* Get cloud environment of the Teams user identifier
* @return cloud environment in which this identifier is created
*/
public CommunicationCloudEnvironment getCloudEnvironment() {
return cloudEnvironment;
}
/**
* Get full id of the identifier. This id is optional.
* @return full id of the identifier
*/
public String getRawId() {
return rawId;
}
@Override
public String getId() {
return getRawId();
}
/**
* Set full id of the identifier
* @param rawId full id of the identifier
* @return CommunicationIdentifier object itself
*/
public MicrosoftTeamsUserIdentifier setRawId(String rawId) {
this.rawId = rawId;
return this;
}
// FIX: the annotation appeared twice; @Override is not a repeatable
// annotation, so the duplicate is a compile error.
// Hashing only userId is contract-safe: instances that are equal (which
// requires matching userId) always share a hash.
@Override
public int hashCode() {
    return userId.hashCode();
}
} | class MicrosoftTeamsUserIdentifier extends CommunicationIdentifier {
private final String userId;
private final boolean isAnonymous;
private CommunicationCloudEnvironment cloudEnvironment = CommunicationCloudEnvironment.PUBLIC;
private String rawId;
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @param cloudEnvironment the cloud environment in which this identifier is created
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous, CommunicationCloudEnvironment cloudEnvironment) {
this(userId, isAnonymous);
this.cloudEnvironment = cloudEnvironment;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous) {
if (CoreUtils.isNullOrEmpty(userId)) {
throw new IllegalArgumentException("The initialization parameter [userId] cannot be null or empty.");
}
this.userId = userId;
this.isAnonymous = isAnonymous;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId) {
this(userId, false);
}
/**
* Get Teams User Id
* @return userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
*/
public String getUserId() {
return this.userId;
}
/**
* @return True if the user is anonymous, for example when joining a meeting with a share link.
*/
public boolean isAnonymous() {
return this.isAnonymous;
}
/**
* Set cloud environment of the Teams user identifier
* @param cloudEnvironment the cloud environment in which this identifier is created
* @return this object
*/
public MicrosoftTeamsUserIdentifier setCloudEnvironment(CommunicationCloudEnvironment cloudEnvironment) {
this.cloudEnvironment = cloudEnvironment;
return this;
}
/**
* Get cloud environment of the Teams user identifier
* @return cloud environment in which this identifier is created
*/
public CommunicationCloudEnvironment getCloudEnvironment() {
return cloudEnvironment;
}
/**
* Get full id of the identifier. This id is optional.
* @return full id of the identifier
*/
public String getRawId() {
return rawId;
}
/**
* Set full id of the identifier
* @param rawId full id of the identifier
* @return CommunicationIdentifier object itself
*/
public MicrosoftTeamsUserIdentifier setRawId(String rawId) {
this.rawId = rawId;
return this;
}
// FIX: the annotation appeared twice; @Override is not a repeatable
// annotation, so the duplicate is a compile error.
// Hashing only userId is contract-safe: instances that are equal (which
// requires matching userId) always share a hash.
@Override
public int hashCode() {
    return userId.hashCode();
}
} |
It is Java. Short names are not acceptable. :-) The Java convention is get/set + Noun. I don't want to get into religious arguments. | public boolean equals(Object that) {
if (this == that) {
return true;
}
if (!(that instanceof MicrosoftTeamsUserIdentifier)) {
return false;
}
MicrosoftTeamsUserIdentifier thatId = (MicrosoftTeamsUserIdentifier) that;
if (!thatId.getUserId().equals(this.getUserId())
|| thatId.isAnonymous != this.isAnonymous) {
return false;
}
if (cloudEnvironment != null && !cloudEnvironment.equals(thatId.cloudEnvironment)) {
return false;
}
if (thatId.cloudEnvironment != null && !thatId.cloudEnvironment.equals(this.cloudEnvironment)) {
return false;
}
return getRawId() == null
|| thatId.getRawId() == null
|| thatId.getRawId().equals(this.getRawId());
} | return getRawId() == null | public boolean equals(Object that) {
if (this == that) {
return true;
}
if (!(that instanceof MicrosoftTeamsUserIdentifier)) {
return false;
}
MicrosoftTeamsUserIdentifier thatId = (MicrosoftTeamsUserIdentifier) that;
if (!thatId.getUserId().equals(this.getUserId())
|| thatId.isAnonymous != this.isAnonymous) {
return false;
}
if (cloudEnvironment != null && !cloudEnvironment.equals(thatId.cloudEnvironment)) {
return false;
}
if (thatId.cloudEnvironment != null && !thatId.cloudEnvironment.equals(this.cloudEnvironment)) {
return false;
}
return getRawId() == null
|| thatId.getRawId() == null
|| thatId.getRawId().equals(this.getRawId());
} | class MicrosoftTeamsUserIdentifier extends CommunicationIdentifier {
private final String userId;
private final boolean isAnonymous;
private CommunicationCloudEnvironment cloudEnvironment = CommunicationCloudEnvironment.PUBLIC;
private String rawId;
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @param cloudEnvironment the cloud environment in which this identifier is created
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous, CommunicationCloudEnvironment cloudEnvironment) {
this(userId, isAnonymous);
this.cloudEnvironment = cloudEnvironment;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous) {
if (CoreUtils.isNullOrEmpty(userId)) {
throw new IllegalArgumentException("The initialization parameter [userId] cannot be null or empty.");
}
this.userId = userId;
this.isAnonymous = isAnonymous;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId) {
this(userId, false);
}
/**
* Get Teams User Id
* @return userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
*/
public String getUserId() {
return this.userId;
}
/**
* @return True if the user is anonymous, for example when joining a meeting with a share link.
*/
public boolean isAnonymous() {
return this.isAnonymous;
}
/**
* Set cloud environment of the Teams user identifier
* @param cloudEnvironment the cloud environment in which this identifier is created
* @return this object
*/
public MicrosoftTeamsUserIdentifier setCloudEnvironment(CommunicationCloudEnvironment cloudEnvironment) {
this.cloudEnvironment = cloudEnvironment;
return this;
}
/**
* Get cloud environment of the Teams user identifier
* @return cloud environment in which this identifier is created
*/
public CommunicationCloudEnvironment getCloudEnvironment() {
return cloudEnvironment;
}
/**
* Get full id of the identifier. This id is optional.
* @return full id of the identifier
*/
public String getRawId() {
return rawId;
}
@Override
public String getId() {
return getRawId();
}
/**
* Set full id of the identifier
* @param rawId full id of the identifier
* @return CommunicationIdentifier object itself
*/
public MicrosoftTeamsUserIdentifier setRawId(String rawId) {
this.rawId = rawId;
return this;
}
// FIX: the annotation appeared twice; @Override is not a repeatable
// annotation, so the duplicate is a compile error.
// Hashing only userId is contract-safe: instances that are equal (which
// requires matching userId) always share a hash.
@Override
public int hashCode() {
    return userId.hashCode();
}
} | class MicrosoftTeamsUserIdentifier extends CommunicationIdentifier {
private final String userId;
private final boolean isAnonymous;
private CommunicationCloudEnvironment cloudEnvironment = CommunicationCloudEnvironment.PUBLIC;
private String rawId;
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @param cloudEnvironment the cloud environment in which this identifier is created
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous, CommunicationCloudEnvironment cloudEnvironment) {
this(userId, isAnonymous);
this.cloudEnvironment = cloudEnvironment;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @param isAnonymous set this to true if the user is anonymous,
* for example when joining a meeting with a share link
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId, boolean isAnonymous) {
if (CoreUtils.isNullOrEmpty(userId)) {
throw new IllegalArgumentException("The initialization parameter [userId] cannot be null or empty.");
}
this.userId = userId;
this.isAnonymous = isAnonymous;
}
/**
* Creates a MicrosoftTeamsUserIdentifier object
*
* @param userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
* @throws IllegalArgumentException thrown if userId parameter fail the validation.
*/
public MicrosoftTeamsUserIdentifier(String userId) {
this(userId, false);
}
/**
* Get Teams User Id
* @return userId Id of the Microsoft Teams user. If the user isn't anonymous, the id is the AAD object id of the user.
*/
public String getUserId() {
return this.userId;
}
/**
* @return True if the user is anonymous, for example when joining a meeting with a share link.
*/
public boolean isAnonymous() {
return this.isAnonymous;
}
/**
* Set cloud environment of the Teams user identifier
* @param cloudEnvironment the cloud environment in which this identifier is created
* @return this object
*/
public MicrosoftTeamsUserIdentifier setCloudEnvironment(CommunicationCloudEnvironment cloudEnvironment) {
this.cloudEnvironment = cloudEnvironment;
return this;
}
/**
* Get cloud environment of the Teams user identifier
* @return cloud environment in which this identifier is created
*/
public CommunicationCloudEnvironment getCloudEnvironment() {
return cloudEnvironment;
}
/**
* Get full id of the identifier. This id is optional.
* @return full id of the identifier
*/
public String getRawId() {
return rawId;
}
/**
* Set full id of the identifier
* @param rawId full id of the identifier
* @return CommunicationIdentifier object itself
*/
public MicrosoftTeamsUserIdentifier setRawId(String rawId) {
this.rawId = rawId;
return this;
}
// FIX: the annotation appeared twice; @Override is not a repeatable
// annotation, so the duplicate is a compile error.
// Hashing only userId is contract-safe: instances that are equal (which
// requires matching userId) always share a hash.
@Override
public int hashCode() {
    return userId.hashCode();
}
} |
Do we want to do the same thing as above, where we use an `orElse`? | private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
// Nothing to do when no tracer implementation is enabled.
if (!TracerProxy.isTracingEnabled()) {
return;
}
// Only value/error signals carry a span worth closing.
if (signal.isOnComplete() || signal.isOnSubscribe()) {
return;
}
reactor.util.context.Context context = signal.getContext();
Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
Optional<Boolean> disableTracing = context.getOrEmpty(Tracer.DISABLE_TRACING_KEY);
// Skip when no span was started for this call or tracing was explicitly
// disabled via the reactor context.
if (!tracingContext.isPresent() || (disableTracing.isPresent() && disableTracing.get())) {
return;
}
int statusCode = 0;
HttpDecodedResponse httpDecodedResponse;
Throwable throwable = null;
// Extract the HTTP status from either the successful response or a thrown
// HttpResponseException; other error types leave the status at 0.
if (signal.hasValue()) {
httpDecodedResponse = signal.get();
statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
} else if (signal.hasError()) {
throwable = signal.getThrowable();
if (throwable instanceof HttpResponseException) {
HttpResponseException exception = (HttpResponseException) throwable;
statusCode = exception.getResponse().getStatusCode();
}
}
TracerProxy.end(statusCode, throwable, tracingContext.get());
} | Optional<Boolean> disableTracing = context.getOrEmpty(Tracer.DISABLE_TRACING_KEY); | private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
if (!TracerProxy.isTracingEnabled()) {
return;
}
if (signal.isOnComplete() || signal.isOnSubscribe()) {
return;
}
reactor.util.context.Context context = signal.getContext();
Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
boolean disableTracing = context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false);
if (!tracingContext.isPresent() || disableTracing) {
return;
}
int statusCode = 0;
HttpDecodedResponse httpDecodedResponse;
Throwable throwable = null;
if (signal.hasValue()) {
httpDecodedResponse = signal.get();
statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
} else if (signal.hasError()) {
throwable = signal.getThrowable();
if (throwable instanceof HttpResponseException) {
HttpResponseException exception = (HttpResponseException) throwable;
statusCode = exception.getResponse().getStatusCode();
}
}
TracerProxy.end(statusCode, throwable, tracingContext.get());
} | class " + cls));
}
}
/**
 * Converts the decoded HTTP response into the value expected by the proxied
 * method's declared body type.
 *
 * @param response the decoded HTTP response.
 * @param methodParser metadata about the invoked Swagger interface method.
 * @param entityType the declared body type of the proxied method.
 * @return a Mono emitting the converted body value.
 */
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
// HEAD + boolean return type: report whether the status code is 2xx.
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
// byte[] return type: hand back the raw bytes, decoding a Base64Url wire
// envelope when the wire type says so.
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
// Flux<ByteBuffer> return type: stream the body without buffering it.
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
// Everything else: deserialize the payload into the declared entity type.
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
}
}
}
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
} |
Should this be `orElse(false)`, for the case when a user doesn't explicitly disable tracing and it should be enabled by default? | private Context startTracingSpan(Method method, Context context) {
// FIX: the default was `orElse(true)`, which silently disabled tracing for
// every call that never set DISABLE_TRACING_KEY. Tracing must be opt-out, so
// a missing key defaults to "not disabled".
boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false);
if (!TracerProxy.isTracingEnabled() || disableTracing) {
    return context;
}
// Span name convention: "<ServiceName>.<methodName>".
String spanName = String.format("%s.%s", interfaceParser.getServiceName(), method.getName());
context = TracerProxy.setSpanName(spanName, context);
return TracerProxy.start(spanName, context);
} | boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(true); | private Context startTracingSpan(Method method, Context context) {
boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false);
if (!TracerProxy.isTracingEnabled() || disableTracing) {
return context;
}
String spanName = String.format("%s.%s", interfaceParser.getServiceName(), method.getName());
context = TracerProxy.setSpanName(spanName, context);
return TracerProxy.start(spanName, context);
} | class RestProxy implements InvocationHandler {
private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0);
private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes.";
private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes.";
private final ClientLogger logger = new ClientLogger(RestProxy.class);
private final HttpPipeline httpPipeline;
private final SerializerAdapter serializer;
private final SwaggerInterfaceParser interfaceParser;
private final HttpResponseDecoder decoder;
private final ResponseConstructorsCache responseConstructorsCache;
/**
* Create a RestProxy.
*
* @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests.
* @param serializer the serializer that will be used to convert response bodies to POJOs.
* @param interfaceParser the parser that contains information about the interface describing REST API methods that
* this RestProxy "implements".
*/
private RestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) {
this.httpPipeline = httpPipeline;
this.serializer = serializer;
this.interfaceParser = interfaceParser;
this.decoder = new HttpResponseDecoder(this.serializer);
this.responseConstructorsCache = new ResponseConstructorsCache();
}
/**
* Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this
* RestProxy was created to "implement".
*
* @param method the method to get a SwaggerMethodParser for
* @return the SwaggerMethodParser for the provided method
*/
private SwaggerMethodParser getMethodParser(Method method) {
// Delegates to the interface parser for this method's Swagger metadata.
return interfaceParser.getMethodParser(method);
}
/**
* Send the provided request asynchronously, applying any request policies provided to the HttpClient instance.
*
* @param request the HTTP request to send
* @param contextData the context
* @return a {@link Mono} that emits HttpResponse asynchronously
*/
public Mono<HttpResponse> send(HttpRequest request, Context contextData) {
// The pipeline applies its configured policies before dispatching the request.
return httpPipeline.send(request, contextData);
}
@Override
public Object invoke(Object proxy, final Method method, Object[] args) {
try {
// ResumeOperation is no longer supported; fail fast if anyone still uses it.
if (method.isAnnotationPresent(ResumeOperation.class)) {
throw logger.logExceptionAsError(Exceptions.propagate(
new Exception("The resume operation isn't supported.")));
}
final SwaggerMethodParser methodParser = getMethodParser(method);
final HttpRequest request = createHttpRequest(methodParser, args);
// Seed the call Context with the caller identity and whether the response
// body should be eagerly read (i.e. the return type is decodable).
Context context = methodParser.setContext(args)
.addData("caller-method", methodParser.getFullyQualifiedMethodName())
.addData("azure-eagerly-read-response", isReturnTypeDecodable(methodParser.getReturnType()));
context = startTracingSpan(method, context);
// Guard the outgoing body against Content-Length mismatches.
if (request.getBody() != null) {
request.setBody(validateLength(request));
}
final Mono<HttpResponse> asyncResponse = send(request, context);
Mono<HttpDecodedResponse> asyncDecodedResponse = this.decoder.decode(asyncResponse, methodParser);
return handleRestReturnType(asyncDecodedResponse, methodParser, methodParser.getReturnType(), context);
} catch (IOException e) {
throw logger.logExceptionAsError(Exceptions.propagate(e));
}
}
/**
 * Wraps the request body so that the total number of emitted bytes is checked
 * against the request's Content-Length header; the stream errors with an
 * UnexpectedLengthException when too many or too few bytes are produced.
 *
 * @param request the request whose body should be length-validated.
 * @return the validated body stream (empty when the request has no body).
 */
static Flux<ByteBuffer> validateLength(final HttpRequest request) {
final Flux<ByteBuffer> bbFlux = request.getBody();
if (bbFlux == null) {
return Flux.empty();
}
final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length"));
// Defer so each subscription gets its own running byte counter.
return Flux.defer(() -> {
final long[] currentTotalLength = new long[1];
// VALIDATION_BUFFER is appended as a sentinel marking the end of the body.
return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> {
if (buffer == null) {
return;
}
if (buffer == VALIDATION_BUFFER) {
// End of body reached: fail if fewer bytes than promised were emitted.
if (expectedLength != currentTotalLength[0]) {
sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL,
currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
} else {
sink.complete();
}
return;
}
currentTotalLength[0] += buffer.remaining();
// Fail as soon as the body exceeds the declared length.
if (currentTotalLength[0] > expectedLength) {
sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
return;
}
sink.next(buffer);
});
});
}
/**
* Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing
* additional context information.
* @param method Service method being called.
* @param context Context information about the current service call.
*
* @return The updated context containing the span context.
*/
/**
* Create a HttpRequest for the provided Swagger method using the provided arguments.
*
* @param methodParser the Swagger method parser to use
* @param args the arguments to use to populate the method's annotation values
* @return a HttpRequest
* @throws IOException thrown if the body contents cannot be serialized
*/
private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException {
final String path = methodParser.setPath(args);
final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path);
final UrlBuilder urlBuilder;
if (pathUrlBuilder.getScheme() != null) {
urlBuilder = pathUrlBuilder;
} else {
urlBuilder = new UrlBuilder();
methodParser.setSchemeAndHost(args, urlBuilder);
if (path != null && !path.isEmpty() && !"/".equals(path)) {
String hostPath = urlBuilder.getPath();
if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(":
urlBuilder.setPath(path);
} else {
urlBuilder.setPath(hostPath + "/" + path);
}
}
}
methodParser.setEncodedQueryParameters(args, urlBuilder);
final URL url = urlBuilder.toUrl();
final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url),
methodParser, args);
HttpHeaders httpHeaders = request.getHeaders();
methodParser.setHeaders(args, httpHeaders);
return request;
}
// Populates the request's body and the Content-Type / Content-Length headers
// from the method's body argument. The unchecked casts are guarded by the
// preceding runtime type checks on bodyContentObject.
@SuppressWarnings("unchecked")
private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser,
final Object[] args) throws IOException {
final Object bodyContentObject = methodParser.setBody(args);
if (bodyContentObject == null) {
// No body: advertise a zero-length payload explicitly.
request.getHeaders().set("Content-Length", "0");
} else {
String contentType = methodParser.getBodyContentType();
// No declared content type: raw bytes/strings default to octet-stream,
// everything else to JSON.
if (contentType == null || contentType.isEmpty()) {
if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) {
contentType = ContentType.APPLICATION_OCTET_STREAM;
} else {
contentType = ContentType.APPLICATION_JSON;
}
}
request.getHeaders().set("Content-Type", contentType);
// A content type like "application/json; charset=utf-8" still counts as JSON.
boolean isJson = false;
final String[] contentTypeParts = contentType.split(";");
for (final String contentTypePart : contentTypeParts) {
if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) {
isJson = true;
break;
}
}
if (isJson) {
// Serialize to JSON up front so the exact Content-Length is known.
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream);
request.setHeader("Content-Length", String.valueOf(stream.size()));
request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
} else if (FluxUtil.isFluxByteBuffer(methodParser.getBodyJavaType())) {
// Streaming body: pass through without buffering.
request.setBody((Flux<ByteBuffer>) bodyContentObject);
} else if (bodyContentObject instanceof byte[]) {
request.setBody((byte[]) bodyContentObject);
} else if (bodyContentObject instanceof String) {
final String bodyContentString = (String) bodyContentObject;
if (!bodyContentString.isEmpty()) {
request.setBody(bodyContentString);
}
} else if (bodyContentObject instanceof ByteBuffer) {
request.setBody(Flux.just((ByteBuffer) bodyContentObject));
} else {
// Fallback: serialize with the encoding implied by the request headers.
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream);
request.setHeader("Content-Length", String.valueOf(stream.size()));
request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
}
}
return request;
}
private Mono<HttpDecodedResponse> ensureExpectedStatus(final Mono<HttpDecodedResponse> asyncDecodedResponse,
    final SwaggerMethodParser methodParser) {
    // Hand each emitted decoded response to the single-response status check.
    return asyncDecodedResponse.flatMap(decoded -> ensureExpectedStatus(decoded, methodParser));
}
/**
 * Builds the exception representing an HTTP error response.
 *
 * Reflectively constructs the service-specific {@link HttpResponseException} subtype mapped to
 * this status code; if that constructor cannot be found or invoked, degrades to a plain
 * {@link IOException} carrying the same diagnostic message and the reflective failure as cause.
 *
 * @param exception descriptor of the exception type (and its body type) mapped to this status code
 * @param httpResponse the failed HTTP response
 * @param responseContent the raw response body bytes; may be null or empty
 * @param responseDecodedContent the deserialized response body; may be null
 * @return the exception to emit for this error response
 */
private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception,
    final HttpResponse httpResponse,
    final byte[] responseContent,
    final Object responseDecodedContent) {
    final int responseStatusCode = httpResponse.getStatusCode();
    final String contentType = httpResponse.getHeaderValue("Content-Type");
    final String bodyRepresentation;
    // Binary payloads are summarized by length only; anything else is echoed as UTF-8 text.
    if ("application/octet-stream".equalsIgnoreCase(contentType)) {
        bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)";
    } else {
        bodyRepresentation = responseContent == null || responseContent.length == 0
            ? "(empty body)"
            : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\"";
    }
    Exception result;
    try {
        // Expected constructor shape: (String message, HttpResponse response, T deserializedBody).
        final Constructor<? extends HttpResponseException> exceptionConstructor =
            exception.getExceptionType().getConstructor(String.class, HttpResponse.class,
                exception.getExceptionBodyType());
        result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation,
            httpResponse,
            responseDecodedContent);
    } catch (ReflectiveOperationException e) {
        // Fallback path: still surface status code and body, preserving the reflective failure.
        String message = "Status code " + responseStatusCode + ", but an instance of "
            + exception.getExceptionType().getCanonicalName() + " cannot be created."
            + " Response body: " + bodyRepresentation;
        result = new IOException(message, e);
    }
    return result;
}
/**
 * Create a publisher that (1) emits an error if the provided response {@code decodedResponse} has a
 * 'disallowed status code' OR (2) emits the provided response if its status code is allowed.
 *
 * A 'disallowed status code' is any status code that the provided SwaggerMethodParser does not
 * declare as expected for this method.
 *
 * @param decodedResponse The HttpResponse to check.
 * @param methodParser The method parser that contains information about the service interface method that initiated
 * the HTTP request.
 * @return An async-version of the provided decodedResponse.
 */
private Mono<HttpDecodedResponse> ensureExpectedStatus(final HttpDecodedResponse decodedResponse,
    final SwaggerMethodParser methodParser) {
    final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode();
    final Mono<HttpDecodedResponse> asyncResult;
    if (!methodParser.isExpectedResponseStatusCode(responseStatusCode)) {
        // Error path: buffer the body so it can be attached to the exception, then try to decode it.
        Mono<byte[]> bodyAsBytes = decodedResponse.getSourceResponse().getBodyAsByteArray();
        asyncResult = bodyAsBytes.flatMap((Function<byte[], Mono<HttpDecodedResponse>>) responseContent -> {
            Mono<Object> decodedErrorBody = decodedResponse.getDecodedBody(responseContent);
            return decodedErrorBody
                .flatMap((Function<Object, Mono<HttpDecodedResponse>>) responseDecodedErrorObject -> {
                    // Body present and decodable: raise with both raw and decoded content.
                    Throwable exception =
                        instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                            decodedResponse.getSourceResponse(),
                            responseContent,
                            responseDecodedErrorObject);
                    return Mono.error(exception);
                })
                .switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
                    // Body present but decoding produced nothing: raise with raw content only.
                    Throwable exception =
                        instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                            decodedResponse.getSourceResponse(),
                            responseContent,
                            null);
                    return Mono.error(exception);
                }));
        }).switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
            // No body at all: raise with the status code alone.
            Throwable exception =
                instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                    decodedResponse.getSourceResponse(),
                    null,
                    null);
            return Mono.error(exception);
        }));
    } else {
        asyncResult = Mono.just(decodedResponse);
    }
    return asyncResult;
}
/**
 * Adapts the decoded HTTP response to the entity type declared by the service method.
 *
 * When the method returns a {@code Response<T>} (or subtype), the body is deserialized — unless the
 * body type is Void, in which case the body stream is drained — and wrapped in the matching concrete
 * response class. Otherwise only the deserialized body is returned.
 *
 * @param response the decoded HTTP response
 * @param methodParser parser describing the invoked Swagger method
 * @param entityType the declared return entity type
 * @return a Mono emitting the adapted value
 */
private Mono<?> handleRestResponseReturnType(final HttpDecodedResponse response,
    final SwaggerMethodParser methodParser,
    final Type entityType) {
    if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) {
        final Type bodyType = TypeUtil.getRestResponseBodyType(entityType);
        if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) {
            // Void body: drain the stream so the connection can be released, then wrap.
            return response.getSourceResponse().getBody().ignoreElements()
                .then(createResponse(response, entityType, null));
        } else {
            // Empty deserialized body still produces a Response, just with a null body.
            return handleBodyReturnType(response, methodParser, bodyType)
                .flatMap(bodyAsObject -> createResponse(response, entityType, bodyAsObject))
                .switchIfEmpty(Mono.defer((Supplier<Mono<Response<?>>>) () -> createResponse(response,
                    entityType, null)));
        }
    } else {
        return handleBodyReturnType(response, methodParser, entityType);
    }
}
/**
 * Instantiates the concrete {@code Response} subtype for the method's declared response type,
 * using the cached constructor lookup.
 *
 * The abstract Response/PagedResponse types are swapped for their instantiable Base classes;
 * a PagedResponse body, when present, must implement {@link Page}.
 *
 * @param response the decoded HTTP response to wrap
 * @param entityType the declared response entity type
 * @param bodyAsObject the deserialized body; may be null
 * @return a Mono emitting the constructed response, or an error when no suitable constructor exists
 */
@SuppressWarnings("unchecked")
private Mono<Response<?>> createResponse(HttpDecodedResponse response, Type entityType, Object bodyAsObject) {
    Class<? extends Response<?>> cls = (Class<? extends Response<?>>) TypeUtil.getRawClass(entityType);
    // Swap interface types for concrete, constructible implementations.
    if (cls.equals(Response.class)) {
        cls = (Class<? extends Response<?>>) (Object) ResponseBase.class;
    } else if (cls.equals(PagedResponse.class)) {
        cls = (Class<? extends Response<?>>) (Object) PagedResponseBase.class;
        if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) {
            throw logger.logExceptionAsError(new RuntimeException(
                "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class));
        }
    }
    Constructor<? extends Response<?>> ctr = this.responseConstructorsCache.get(cls);
    if (ctr != null) {
        return this.responseConstructorsCache.invoke(ctr, response, bodyAsObject);
    } else {
        return Mono.error(new RuntimeException("Cannot find suitable constructor for class " + cls));
    }
}
/**
 * Deserializes or adapts the response body to the declared entity type.
 *
 * Special cases: a HEAD request with a boolean return maps to "was the status 2xx"; a byte[]
 * return gets the raw body bytes (Base64Url-unwrapped when the wire type says so); a
 * {@code Flux<ByteBuffer>} return streams the body without buffering; anything else is decoded
 * through the serializer.
 *
 * @param response the decoded HTTP response
 * @param methodParser parser describing the invoked Swagger method
 * @param entityType the declared body entity type
 * @return a Mono emitting the adapted body value
 */
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
    final SwaggerMethodParser methodParser, final Type entityType) {
    final int responseStatusCode = response.getSourceResponse().getStatusCode();
    final HttpMethod httpMethod = methodParser.getHttpMethod();
    final Type returnValueWireType = methodParser.getReturnValueWireType();
    final Mono<?> asyncResult;
    if (httpMethod == HttpMethod.HEAD
        && (TypeUtil.isTypeOrSubTypeOf(
            entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
        // HEAD + boolean: success is any 2xx status.
        boolean isSuccess = (responseStatusCode / 100) == 2;
        asyncResult = Mono.just(isSuccess);
    } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
        Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
        if (returnValueWireType == Base64Url.class) {
            // The wire carries Base64Url-encoded bytes; hand the caller the decoded form.
            responseBodyBytesAsync =
                responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
        }
        asyncResult = responseBodyBytesAsync;
    } else if (FluxUtil.isFluxByteBuffer(entityType)) {
        // Streaming return type: expose the body flux directly, no buffering.
        asyncResult = Mono.just(response.getSourceResponse().getBody());
    } else {
        // POJO return type: decode lazily via the response decoder.
        asyncResult = response.getDecodedBody((byte[]) null);
    }
    return asyncResult;
}
/**
 * Handle the provided asynchronous HTTP response and return the deserialized value.
 *
 * Async return types (Mono, Flux) stay non-blocking; synchronous return types block the calling
 * thread until the pipeline completes.
 *
 * @param asyncHttpDecodedResponse the asynchronous HTTP response to the original HTTP request
 * @param methodParser the SwaggerMethodParser that the request originates from
 * @param returnType the type of value that will be returned
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return the deserialized result
 */
private Object handleRestReturnType(final Mono<HttpDecodedResponse> asyncHttpDecodedResponse,
    final SwaggerMethodParser methodParser,
    final Type returnType,
    final Context context) {
    // Status validation + span termination are attached once, then shared by every branch below.
    final Mono<HttpDecodedResponse> asyncExpectedResponse =
        ensureExpectedStatus(asyncHttpDecodedResponse, methodParser)
            .doOnEach(RestProxy::endTracingSpan)
            .subscriberContext(reactor.util.context.Context.of("TRACING_CONTEXT", context));
    final Object result;
    if (TypeUtil.isTypeOrSubTypeOf(returnType, Mono.class)) {
        final Type monoTypeParam = TypeUtil.getTypeArgument(returnType);
        if (TypeUtil.isTypeOrSubTypeOf(monoTypeParam, Void.class)) {
            // Mono<Void>: caller only cares about completion.
            result = asyncExpectedResponse.then();
        } else {
            result = asyncExpectedResponse.flatMap(response ->
                handleRestResponseReturnType(response, methodParser, monoTypeParam));
        }
    } else if (FluxUtil.isFluxByteBuffer(returnType)) {
        // Streaming return: surface the raw body flux.
        result = asyncExpectedResponse.flatMapMany(ar -> ar.getSourceResponse().getBody());
    } else if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType,
        Void.class)) {
        // Synchronous void: block for completion (and any error), discard the value.
        asyncExpectedResponse.block();
        result = null;
    } else {
        // Synchronous value: block until the adapted body is available.
        result = asyncExpectedResponse
            .flatMap(httpResponse -> handleRestResponseReturnType(httpResponse, methodParser, returnType))
            .block();
    }
    return result;
}
/**
 * Terminates the tracing span attached to the signal's reactor context once the HTTP call yields a
 * terminal value or error. No-op when tracing is disabled, when the signal carries neither a value
 * nor an error, or when no tracing context was attached.
 *
 * @param signal the reactor signal emitted by the decoded-response pipeline
 */
private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
    if (!TracerProxy.isTracingEnabled()) {
        return;
    }
    // Only onNext and onError carry an outcome worth recording on the span.
    if (signal.isOnComplete() || signal.isOnSubscribe()) {
        return;
    }
    reactor.util.context.Context reactorContext = signal.getContext();
    Optional<Context> tracingContext = reactorContext.getOrEmpty("TRACING_CONTEXT");
    boolean tracingDisabled = reactorContext.getOrDefault(Tracer.DISABLE_TRACING_KEY, false);
    if (!tracingContext.isPresent() || tracingDisabled) {
        return;
    }
    int statusCode = 0;
    Throwable error = null;
    if (signal.hasValue()) {
        statusCode = signal.get().getSourceResponse().getStatusCode();
    } else if (signal.hasError()) {
        error = signal.getThrowable();
        if (error instanceof HttpResponseException) {
            statusCode = ((HttpResponseException) error).getResponse().getStatusCode();
        }
    }
    TracerProxy.end(statusCode, error, tracingContext.get());
}
/**
 * Create an instance of the default serializer.
 *
 * The default is the Jackson-based adapter provided by {@link JacksonAdapter}.
 *
 * @return the default serializer
 */
private static SerializerAdapter createDefaultSerializer() {
    return JacksonAdapter.createDefaultSerializerAdapter();
}
/**
 * Create the default HttpPipeline.
 *
 * Equivalent to {@link #createDefaultPipeline(HttpPipelinePolicy)} with no credentials policy.
 *
 * @return the default HttpPipeline
 */
private static HttpPipeline createDefaultPipeline() {
    return createDefaultPipeline(null);
}
/**
 * Create the default HttpPipeline.
 *
 * @param credentialsPolicy the credentials policy factory to use to apply authentication to the pipeline
 * @return the default HttpPipeline
 */
private static HttpPipeline createDefaultPipeline(HttpPipelinePolicy credentialsPolicy) {
    // Policy order matters: user agent, retry, cookies, then (optionally) credentials last.
    final List<HttpPipelinePolicy> defaultPolicies = new ArrayList<>();
    defaultPolicies.add(new UserAgentPolicy());
    defaultPolicies.add(new RetryPolicy());
    defaultPolicies.add(new CookiePolicy());
    if (credentialsPolicy != null) {
        defaultPolicies.add(credentialsPolicy);
    }
    final HttpPipelineBuilder builder = new HttpPipelineBuilder();
    return builder.policies(defaultPolicies.toArray(new HttpPipelinePolicy[0])).build();
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * Uses the default pipeline and the default (Jackson) serializer.
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param <A> the type of the Swagger interface
 * @return a proxy implementation of the provided Swagger interface
 */
public static <A> A create(Class<A> swaggerInterface) {
    return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer());
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * Uses the default (Jackson) serializer.
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
 * @param <A> the type of the Swagger interface
 * @return a proxy implementation of the provided Swagger interface
 */
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) {
    return create(swaggerInterface, httpPipeline, createDefaultSerializer());
}
/**
 * Create a proxy implementation of the provided Swagger interface.
 *
 * @param swaggerInterface the Swagger interface to provide a proxy implementation for
 * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
 * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies
 * @param <A> the type of the Swagger interface.
 * @return a proxy implementation of the provided Swagger interface
 */
@SuppressWarnings("unchecked")
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) {
    // One RestProxy instance serves as the InvocationHandler for every method of the interface.
    final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer);
    final RestProxy restProxy = new RestProxy(httpPipeline, serializer, interfaceParser);
    return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface},
        restProxy);
}
} | class RestProxy implements InvocationHandler {
// Zero-length sentinel appended to request bodies so length validation can detect under-reporting.
private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0);
private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes.";
private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes.";
private final ClientLogger logger = new ClientLogger(RestProxy.class);
// Pipeline through which every request produced by this proxy is sent.
private final HttpPipeline httpPipeline;
// Serializer used for request and response bodies.
private final SerializerAdapter serializer;
// Parsed metadata of the Swagger interface this proxy "implements".
private final SwaggerInterfaceParser interfaceParser;
// Decodes HTTP responses using the same serializer as requests.
private final HttpResponseDecoder decoder;
// Cache of Response-subtype constructors used when materializing Response<T> return values.
private final ResponseConstructorsCache responseConstructorsCache;
/**
 * Create a RestProxy.
 *
 * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests.
 * @param serializer the serializer that will be used to convert response bodies to POJOs.
 * @param interfaceParser the parser that contains information about the interface describing REST API methods that
 * this RestProxy "implements".
 */
private RestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) {
    this.httpPipeline = httpPipeline;
    this.serializer = serializer;
    this.interfaceParser = interfaceParser;
    // The decoder shares this proxy's serializer so responses are deserialized consistently.
    this.decoder = new HttpResponseDecoder(this.serializer);
    this.responseConstructorsCache = new ResponseConstructorsCache();
}
/**
 * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this
 * RestProxy was created to "implement".
 *
 * @param method the method to get a SwaggerMethodParser for
 * @return the SwaggerMethodParser for the provided method
 */
private SwaggerMethodParser getMethodParser(Method method) {
    // Delegates to the interface parser, which owns the per-method parsing.
    return interfaceParser.getMethodParser(method);
}
/**
 * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance.
 *
 * @param request the HTTP request to send
 * @param contextData the context
 * @return a {@link Mono} that emits HttpResponse asynchronously
 */
public Mono<HttpResponse> send(HttpRequest request, Context contextData) {
    return httpPipeline.send(request, contextData);
}
/**
 * Handles an invocation of a method on the proxied Swagger interface: builds the HttpRequest from
 * the method's annotations and arguments, sends it through the pipeline, and adapts the decoded
 * response to the method's declared return type.
 *
 * @param proxy the dynamic proxy instance (unused)
 * @param method the interface method that was invoked
 * @param args the arguments supplied by the caller
 * @return the value to return from the proxied method (a blocking value, Mono, or Flux)
 */
@Override
public Object invoke(Object proxy, final Method method, Object[] args) {
    try {
        // Resumable operations are explicitly rejected.
        if (method.isAnnotationPresent(ResumeOperation.class)) {
            throw logger.logExceptionAsError(Exceptions.propagate(
                new Exception("The resume operation isn't supported.")));
        }
        final SwaggerMethodParser methodParser = getMethodParser(method);
        final HttpRequest request = createHttpRequest(methodParser, args);
        // Context records the calling method and whether the response body can be eagerly read.
        Context context = methodParser.setContext(args)
            .addData("caller-method", methodParser.getFullyQualifiedMethodName())
            .addData("azure-eagerly-read-response", isReturnTypeDecodable(methodParser.getReturnType()));
        context = startTracingSpan(method, context);
        // Only wrap the body in length validation when a body is actually present.
        if (request.getBody() != null) {
            request.setBody(validateLength(request));
        }
        final Mono<HttpResponse> asyncResponse = send(request, context);
        Mono<HttpDecodedResponse> asyncDecodedResponse = this.decoder.decode(asyncResponse, methodParser);
        return handleRestReturnType(asyncDecodedResponse, methodParser, methodParser.getReturnType(), context);
    } catch (IOException e) {
        // Request-body serialization failed; surface as an unchecked error.
        throw logger.logExceptionAsError(Exceptions.propagate(e));
    }
}
/**
 * Wraps the request body so the number of bytes actually emitted is validated against the
 * request's {@code Content-Length} header. The returned Flux errors with
 * {@link UnexpectedLengthException} when the body emits more bytes than declared (detected as
 * soon as the excess arrives) or fewer bytes than declared (detected at stream completion).
 *
 * @param request the request whose body should be validated; its body may be null
 * @return the validating body Flux (an empty Flux when the request has no body; the body
 * unchanged when no Content-Length header is present)
 */
static Flux<ByteBuffer> validateLength(final HttpRequest request) {
    final Flux<ByteBuffer> bbFlux = request.getBody();
    if (bbFlux == null) {
        return Flux.empty();
    }
    // Robustness fix: a request without a Content-Length header previously hit
    // Long.parseLong(null), which throws NumberFormatException. With no declared length
    // there is nothing to validate, so pass the body through unchanged.
    final String contentLength = request.getHeaders().getValue("Content-Length");
    if (contentLength == null) {
        return bbFlux;
    }
    final long expectedLength = Long.parseLong(contentLength);
    return Flux.defer(() -> {
        // Single-element array so the handler lambda can mutate the running byte count.
        final long[] currentTotalLength = new long[1];
        // VALIDATION_BUFFER is a sentinel concatenated after the real body so the
        // "too small" check runs exactly once, after every genuine buffer is counted.
        return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> {
            if (buffer == null) {
                return;
            }
            if (buffer == VALIDATION_BUFFER) {
                if (expectedLength != currentTotalLength[0]) {
                    sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL,
                        currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                } else {
                    sink.complete();
                }
                return;
            }
            currentTotalLength[0] += buffer.remaining();
            if (currentTotalLength[0] > expectedLength) {
                sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
                    currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                return;
            }
            sink.next(buffer);
        });
    });
}
/**
* Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing
* additional context information.
* @param method Service method being called.
* @param context Context information about the current service call.
*
* @return The updated context containing the span context.
*/
/**
* Create a HttpRequest for the provided Swagger method using the provided arguments.
*
* @param methodParser the Swagger method parser to use
* @param args the arguments to use to populate the method's annotation values
* @return a HttpRequest
* @throws IOException thrown if the body contents cannot be serialized
*/
private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException {
final String path = methodParser.setPath(args);
final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path);
final UrlBuilder urlBuilder;
if (pathUrlBuilder.getScheme() != null) {
urlBuilder = pathUrlBuilder;
} else {
urlBuilder = new UrlBuilder();
methodParser.setSchemeAndHost(args, urlBuilder);
if (path != null && !path.isEmpty() && !"/".equals(path)) {
String hostPath = urlBuilder.getPath();
if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(":
urlBuilder.setPath(path);
} else {
urlBuilder.setPath(hostPath + "/" + path);
}
}
}
methodParser.setEncodedQueryParameters(args, urlBuilder);
final URL url = urlBuilder.toUrl();
final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url),
methodParser, args);
HttpHeaders httpHeaders = request.getHeaders();
methodParser.setHeaders(args, httpHeaders);
return request;
}
/**
 * Populates the request's body and content headers from the Swagger method arguments.
 *
 * With no body argument, Content-Length is pinned to 0. Otherwise the Content-Type comes from the
 * method (defaulting to octet-stream for raw byte/String bodies and JSON for POJOs) and the body
 * is serialized accordingly.
 *
 * @param request the request to configure
 * @param methodParser the parser describing the invoked Swagger method
 * @param args the caller-supplied arguments
 * @return the same request instance, configured
 * @throws IOException if the body cannot be serialized
 */
@SuppressWarnings("unchecked")
private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser,
    final Object[] args) throws IOException {
    final Object bodyContentObject = methodParser.setBody(args);
    if (bodyContentObject == null) {
        request.getHeaders().set("Content-Length", "0");
    } else {
        String contentType = methodParser.getBodyContentType();
        if (contentType == null || contentType.isEmpty()) {
            if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) {
                contentType = ContentType.APPLICATION_OCTET_STREAM;
            } else {
                contentType = ContentType.APPLICATION_JSON;
            }
        }
        request.getHeaders().set("Content-Type", contentType);
        // Content-Type may carry parameters (e.g. "application/json; charset=utf-8"),
        // so each ';'-separated part is checked for the JSON media type.
        boolean isJson = false;
        final String[] contentTypeParts = contentType.split(";");
        for (final String contentTypePart : contentTypeParts) {
            if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) {
                isJson = true;
                break;
            }
        }
        if (isJson) {
            // JSON bodies are serialized eagerly so the exact Content-Length is known up front.
            ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
            serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream);
            request.setHeader("Content-Length", String.valueOf(stream.size()));
            request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
        } else if (FluxUtil.isFluxByteBuffer(methodParser.getBodyJavaType())) {
            // Streaming body: passed through untouched.
            request.setBody((Flux<ByteBuffer>) bodyContentObject);
        } else if (bodyContentObject instanceof byte[]) {
            request.setBody((byte[]) bodyContentObject);
        } else if (bodyContentObject instanceof String) {
            final String bodyContentString = (String) bodyContentObject;
            if (!bodyContentString.isEmpty()) {
                request.setBody(bodyContentString);
            }
        } else if (bodyContentObject instanceof ByteBuffer) {
            request.setBody(Flux.just((ByteBuffer) bodyContentObject));
        } else {
            // Non-JSON POJO: pick the serialization encoding from the request headers.
            ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
            serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream);
            request.setHeader("Content-Length", String.valueOf(stream.size()));
            request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
        }
    }
    return request;
}
private Mono<HttpDecodedResponse> ensureExpectedStatus(final Mono<HttpDecodedResponse> asyncDecodedResponse,
final SwaggerMethodParser methodParser) {
return asyncDecodedResponse
.flatMap(decodedHttpResponse -> ensureExpectedStatus(decodedHttpResponse, methodParser));
}
private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception,
final HttpResponse httpResponse,
final byte[] responseContent,
final Object responseDecodedContent) {
final int responseStatusCode = httpResponse.getStatusCode();
final String contentType = httpResponse.getHeaderValue("Content-Type");
final String bodyRepresentation;
if ("application/octet-stream".equalsIgnoreCase(contentType)) {
bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)";
} else {
bodyRepresentation = responseContent == null || responseContent.length == 0
? "(empty body)"
: "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\"";
}
Exception result;
try {
final Constructor<? extends HttpResponseException> exceptionConstructor =
exception.getExceptionType().getConstructor(String.class, HttpResponse.class,
exception.getExceptionBodyType());
result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation,
httpResponse,
responseDecodedContent);
} catch (ReflectiveOperationException e) {
String message = "Status code " + responseStatusCode + ", but an instance of "
+ exception.getExceptionType().getCanonicalName() + " cannot be created."
+ " Response body: " + bodyRepresentation;
result = new IOException(message, e);
}
return result;
}
/**
* Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status
* code' OR (2) emits provided response if it's status code ia allowed.
*
* 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[]
* of additional allowed status codes.
*
* @param decodedResponse The HttpResponse to check.
* @param methodParser The method parser that contains information about the service interface method that initiated
* the HTTP request.
* @return An async-version of the provided decodedResponse.
*/
private Mono<HttpDecodedResponse> ensureExpectedStatus(final HttpDecodedResponse decodedResponse,
final SwaggerMethodParser methodParser) {
final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode();
final Mono<HttpDecodedResponse> asyncResult;
if (!methodParser.isExpectedResponseStatusCode(responseStatusCode)) {
Mono<byte[]> bodyAsBytes = decodedResponse.getSourceResponse().getBodyAsByteArray();
asyncResult = bodyAsBytes.flatMap((Function<byte[], Mono<HttpDecodedResponse>>) responseContent -> {
Mono<Object> decodedErrorBody = decodedResponse.getDecodedBody(responseContent);
return decodedErrorBody
.flatMap((Function<Object, Mono<HttpDecodedResponse>>) responseDecodedErrorObject -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
responseContent,
responseDecodedErrorObject);
return Mono.error(exception);
})
.switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
responseContent,
null);
return Mono.error(exception);
}));
}).switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
null,
null);
return Mono.error(exception);
}));
} else {
asyncResult = Mono.just(decodedResponse);
}
return asyncResult;
}
private Mono<?> handleRestResponseReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser,
final Type entityType) {
if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) {
final Type bodyType = TypeUtil.getRestResponseBodyType(entityType);
if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) {
return response.getSourceResponse().getBody().ignoreElements()
.then(createResponse(response, entityType, null));
} else {
return handleBodyReturnType(response, methodParser, bodyType)
.flatMap(bodyAsObject -> createResponse(response, entityType, bodyAsObject))
.switchIfEmpty(Mono.defer((Supplier<Mono<Response<?>>>) () -> createResponse(response,
entityType, null)));
}
} else {
return handleBodyReturnType(response, methodParser, entityType);
}
}
@SuppressWarnings("unchecked")
private Mono<Response<?>> createResponse(HttpDecodedResponse response, Type entityType, Object bodyAsObject) {
Class<? extends Response<?>> cls = (Class<? extends Response<?>>) TypeUtil.getRawClass(entityType);
if (cls.equals(Response.class)) {
cls = (Class<? extends Response<?>>) (Object) ResponseBase.class;
} else if (cls.equals(PagedResponse.class)) {
cls = (Class<? extends Response<?>>) (Object) PagedResponseBase.class;
if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) {
throw logger.logExceptionAsError(new RuntimeException(
"Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class));
}
}
Constructor<? extends Response<?>> ctr = this.responseConstructorsCache.get(cls);
if (ctr != null) {
return this.responseConstructorsCache.invoke(ctr, response, bodyAsObject);
} else {
return Mono.error(new RuntimeException("Cannot find suitable constructor for class " + cls));
}
}
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
}
/**
* Handle the provided asynchronous HTTP response and return the deserialized value.
*
* @param asyncHttpDecodedResponse the asynchronous HTTP response to the original HTTP request
* @param methodParser the SwaggerMethodParser that the request originates from
* @param returnType the type of value that will be returned
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return the deserialized result
*/
private Object handleRestReturnType(final Mono<HttpDecodedResponse> asyncHttpDecodedResponse,
final SwaggerMethodParser methodParser,
final Type returnType,
final Context context) {
final Mono<HttpDecodedResponse> asyncExpectedResponse =
ensureExpectedStatus(asyncHttpDecodedResponse, methodParser)
.doOnEach(RestProxy::endTracingSpan)
.subscriberContext(reactor.util.context.Context.of("TRACING_CONTEXT", context));
final Object result;
if (TypeUtil.isTypeOrSubTypeOf(returnType, Mono.class)) {
final Type monoTypeParam = TypeUtil.getTypeArgument(returnType);
if (TypeUtil.isTypeOrSubTypeOf(monoTypeParam, Void.class)) {
result = asyncExpectedResponse.then();
} else {
result = asyncExpectedResponse.flatMap(response ->
handleRestResponseReturnType(response, methodParser, monoTypeParam));
}
} else if (FluxUtil.isFluxByteBuffer(returnType)) {
result = asyncExpectedResponse.flatMapMany(ar -> ar.getSourceResponse().getBody());
} else if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType,
Void.class)) {
asyncExpectedResponse.block();
result = null;
} else {
result = asyncExpectedResponse
.flatMap(httpResponse -> handleRestResponseReturnType(httpResponse, methodParser, returnType))
.block();
}
return result;
}
/**
 * Terminates the tracing span attached to the signal's reactor context once the HTTP call yields a
 * terminal value or error. No-op when tracing is disabled, when the signal carries neither a value
 * nor an error, or when no tracing context was attached.
 *
 * @param signal the reactor signal emitted by the decoded-response pipeline
 */
private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
    if (!TracerProxy.isTracingEnabled()) {
        return;
    }
    // Only onNext and onError carry an outcome worth recording on the span.
    if (signal.isOnComplete() || signal.isOnSubscribe()) {
        return;
    }
    reactor.util.context.Context context = signal.getContext();
    Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
    boolean disableTracing = context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false);
    if (!tracingContext.isPresent() || disableTracing) {
        return;
    }
    int statusCode = 0;
    HttpDecodedResponse httpDecodedResponse;
    Throwable throwable = null;
    if (signal.hasValue()) {
        httpDecodedResponse = signal.get();
        statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
    } else if (signal.hasError()) {
        throwable = signal.getThrowable();
        // Pull the status code out of HTTP-level failures so the span still records it.
        if (throwable instanceof HttpResponseException) {
            HttpResponseException exception = (HttpResponseException) throwable;
            statusCode = exception.getResponse().getStatusCode();
        }
    }
    TracerProxy.end(statusCode, throwable, tracingContext.get());
}
/**
* Create an instance of the default serializer.
*
* @return the default serializer
*/
private static SerializerAdapter createDefaultSerializer() {
return JacksonAdapter.createDefaultSerializerAdapter();
}
/**
* Create the default HttpPipeline.
*
* @return the default HttpPipeline
*/
private static HttpPipeline createDefaultPipeline() {
return createDefaultPipeline(null);
}
/**
* Create the default HttpPipeline.
*
* @param credentialsPolicy the credentials policy factory to use to apply authentication to the pipeline
* @return the default HttpPipeline
*/
private static HttpPipeline createDefaultPipeline(HttpPipelinePolicy credentialsPolicy) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy());
policies.add(new RetryPolicy());
policies.add(new CookiePolicy());
if (credentialsPolicy != null) {
policies.add(credentialsPolicy);
}
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param <A> the type of the Swagger interface
* @return a proxy implementation of the provided Swagger interface
*/
public static <A> A create(Class<A> swaggerInterface) {
return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer());
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
* @param <A> the type of the Swagger interface
* @return a proxy implementation of the provided Swagger interface
*/
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) {
return create(swaggerInterface, httpPipeline, createDefaultSerializer());
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests
* @param serializer the serializer that will be used to convert POJOs to and from request and response bodies
* @param <A> the type of the Swagger interface.
* @return a proxy implementation of the provided Swagger interface
*/
@SuppressWarnings("unchecked")
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) {
final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer);
final RestProxy restProxy = new RestProxy(httpPipeline, serializer, interfaceParser);
return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface},
restProxy);
}
} |
Fixed. | private Context startTracingSpan(Method method, Context context) {
boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(true);
if (!TracerProxy.isTracingEnabled() || disableTracing) {
return context;
}
String spanName = String.format("%s.%s", interfaceParser.getServiceName(), method.getName());
context = TracerProxy.setSpanName(spanName, context);
return TracerProxy.start(spanName, context);
} | boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(true); | private Context startTracingSpan(Method method, Context context) {
boolean disableTracing = (boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false);
if (!TracerProxy.isTracingEnabled() || disableTracing) {
return context;
}
String spanName = String.format("%s.%s", interfaceParser.getServiceName(), method.getName());
context = TracerProxy.setSpanName(spanName, context);
return TracerProxy.start(spanName, context);
} | class RestProxy implements InvocationHandler {
private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0);
private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes.";
private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes.";
private final ClientLogger logger = new ClientLogger(RestProxy.class);
private final HttpPipeline httpPipeline;
private final SerializerAdapter serializer;
private final SwaggerInterfaceParser interfaceParser;
private final HttpResponseDecoder decoder;
private final ResponseConstructorsCache responseConstructorsCache;
    /**
     * Create a RestProxy.
     *
     * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests.
     * @param serializer the serializer that will be used to convert response bodies to POJOs.
     * @param interfaceParser the parser that contains information about the interface describing REST API methods that
     * this RestProxy "implements".
     */
    private RestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) {
        this.httpPipeline = httpPipeline;
        this.serializer = serializer;
        this.interfaceParser = interfaceParser;
        // The decoder shares the proxy's serializer so responses are deserialized consistently.
        this.decoder = new HttpResponseDecoder(this.serializer);
        this.responseConstructorsCache = new ResponseConstructorsCache();
    }
    /**
     * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this
     * RestProxy was created to "implement".
     *
     * @param method the method to get a SwaggerMethodParser for
     * @return the SwaggerMethodParser for the provided method
     */
    private SwaggerMethodParser getMethodParser(Method method) {
        // The interface parser owns per-method metadata; delegate the lookup to it.
        return interfaceParser.getMethodParser(method);
    }
    /**
     * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance.
     *
     * <p>Delegates directly to the configured pipeline; no per-call state is kept on this proxy.</p>
     *
     * @param request the HTTP request to send
     * @param contextData the context
     * @return a {@link Mono} that emits HttpResponse asynchronously
     */
    public Mono<HttpResponse> send(HttpRequest request, Context contextData) {
        return httpPipeline.send(request, contextData);
    }
    /**
     * Handles an invocation of a method on the proxied Swagger interface: builds the HTTP request from the
     * method's annotations and arguments, sends it through the pipeline, and adapts the decoded response to the
     * method's declared return type.
     *
     * @param proxy the proxy instance the method was invoked on (unused)
     * @param method the Swagger interface method that was invoked
     * @param args the arguments passed to the method
     * @return the value to return from the proxied method (synchronous value, Mono, or Flux)
     */
    @Override
    public Object invoke(Object proxy, final Method method, Object[] args) {
        try {
            if (method.isAnnotationPresent(ResumeOperation.class)) {
                throw logger.logExceptionAsError(Exceptions.propagate(
                    new Exception("The resume operation isn't supported.")));
            }
            final SwaggerMethodParser methodParser = getMethodParser(method);
            final HttpRequest request = createHttpRequest(methodParser, args);
            Context context = methodParser.setContext(args)
                .addData("caller-method", methodParser.getFullyQualifiedMethodName())
                .addData("azure-eagerly-read-response", isReturnTypeDecodable(methodParser.getReturnType()));
            context = startTracingSpan(method, context);
            // Validate the emitted body length against the Content-Length header, if a body is present.
            if (request.getBody() != null) {
                request.setBody(validateLength(request));
            }
            final Mono<HttpResponse> asyncResponse = send(request, context);
            Mono<HttpDecodedResponse> asyncDecodedResponse = this.decoder.decode(asyncResponse, methodParser);
            return handleRestReturnType(asyncDecodedResponse, methodParser, methodParser.getReturnType(), context);
        } catch (IOException e) {
            throw logger.logExceptionAsError(Exceptions.propagate(e));
        }
    }
    /**
     * Wraps the request body so the number of bytes actually emitted is validated against the declared
     * {@code Content-Length} header, erroring the stream on any mismatch.
     *
     * @param request the request whose body should be length-validated
     * @return the validated body stream, or an empty stream when the request has no body
     */
    static Flux<ByteBuffer> validateLength(final HttpRequest request) {
        final Flux<ByteBuffer> bbFlux = request.getBody();
        if (bbFlux == null) {
            return Flux.empty();
        }
        // NOTE(review): throws NumberFormatException if the Content-Length header is absent; callers
        // appear to always set it (see configRequest) — confirm.
        final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length"));
        return Flux.defer(() -> {
            // Running byte count; single-element array so the lambda below can mutate it.
            final long[] currentTotalLength = new long[1];
            // VALIDATION_BUFFER is a sentinel appended after the real body so the "too small" check
            // runs exactly once, after every data buffer has been counted.
            return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> {
                if (buffer == null) {
                    return;
                }
                if (buffer == VALIDATION_BUFFER) {
                    if (expectedLength != currentTotalLength[0]) {
                        sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL,
                            currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                    } else {
                        sink.complete();
                    }
                    return;
                }
                currentTotalLength[0] += buffer.remaining();
                // Fail fast as soon as the body exceeds the declared length.
                if (currentTotalLength[0] > expectedLength) {
                    sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
                        currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                    return;
                }
                sink.next(buffer);
            });
        });
    }
/**
* Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing
* additional context information.
* @param method Service method being called.
* @param context Context information about the current service call.
*
* @return The updated context containing the span context.
*/
/**
* Create a HttpRequest for the provided Swagger method using the provided arguments.
*
* @param methodParser the Swagger method parser to use
* @param args the arguments to use to populate the method's annotation values
* @return a HttpRequest
* @throws IOException thrown if the body contents cannot be serialized
*/
private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException {
final String path = methodParser.setPath(args);
final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path);
final UrlBuilder urlBuilder;
if (pathUrlBuilder.getScheme() != null) {
urlBuilder = pathUrlBuilder;
} else {
urlBuilder = new UrlBuilder();
methodParser.setSchemeAndHost(args, urlBuilder);
if (path != null && !path.isEmpty() && !"/".equals(path)) {
String hostPath = urlBuilder.getPath();
if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(":
urlBuilder.setPath(path);
} else {
urlBuilder.setPath(hostPath + "/" + path);
}
}
}
methodParser.setEncodedQueryParameters(args, urlBuilder);
final URL url = urlBuilder.toUrl();
final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url),
methodParser, args);
HttpHeaders httpHeaders = request.getHeaders();
methodParser.setHeaders(args, httpHeaders);
return request;
}
    /**
     * Applies the method's body argument to the request: picks a Content-Type, serializes the body,
     * and sets Content-Length where the size is known.
     *
     * @param request the request to configure
     * @param methodParser parser describing the invoked service method
     * @param args the invocation arguments, from which the body value is extracted
     * @return the same request instance, configured
     * @throws IOException if the body cannot be serialized
     */
    @SuppressWarnings("unchecked")
    private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser,
        final Object[] args) throws IOException {
        final Object bodyContentObject = methodParser.setBody(args);
        if (bodyContentObject == null) {
            request.getHeaders().set("Content-Length", "0");
        } else {
            // No explicit content type: raw bytes/strings default to octet-stream, POJOs to JSON.
            String contentType = methodParser.getBodyContentType();
            if (contentType == null || contentType.isEmpty()) {
                if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) {
                    contentType = ContentType.APPLICATION_OCTET_STREAM;
                } else {
                    contentType = ContentType.APPLICATION_JSON;
                }
            }
            request.getHeaders().set("Content-Type", contentType);
            // Detect JSON even when the content type carries parameters (e.g. "application/json; charset=utf-8").
            boolean isJson = false;
            final String[] contentTypeParts = contentType.split(";");
            for (final String contentTypePart : contentTypeParts) {
                if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) {
                    isJson = true;
                    break;
                }
            }
            if (isJson) {
                // Serialize eagerly so Content-Length can be set precisely; the Flux is deferred so the
                // same buffered bytes can be replayed on retries.
                ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
                serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream);
                request.setHeader("Content-Length", String.valueOf(stream.size()));
                request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
            } else if (FluxUtil.isFluxByteBuffer(methodParser.getBodyJavaType())) {
                // Streaming body: pass through without buffering or setting Content-Length here.
                request.setBody((Flux<ByteBuffer>) bodyContentObject);
            } else if (bodyContentObject instanceof byte[]) {
                request.setBody((byte[]) bodyContentObject);
            } else if (bodyContentObject instanceof String) {
                final String bodyContentString = (String) bodyContentObject;
                if (!bodyContentString.isEmpty()) {
                    request.setBody(bodyContentString);
                }
            } else if (bodyContentObject instanceof ByteBuffer) {
                request.setBody(Flux.just((ByteBuffer) bodyContentObject));
            } else {
                // Non-JSON POJO: serialize per the request's declared encoding.
                ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
                serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream);
                request.setHeader("Content-Length", String.valueOf(stream.size()));
                request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
            }
        }
        return request;
    }
    /**
     * Async overload: validates each emitted decoded response's status code, delegating to the
     * single-response {@code ensureExpectedStatus} overload.
     *
     * @param asyncDecodedResponse the decoded response to validate
     * @param methodParser parser describing the expected status codes for the invoked method
     * @return the response, or an error for a disallowed status code
     */
    private Mono<HttpDecodedResponse> ensureExpectedStatus(final Mono<HttpDecodedResponse> asyncDecodedResponse,
        final SwaggerMethodParser methodParser) {
        return asyncDecodedResponse
            .flatMap(decodedHttpResponse -> ensureExpectedStatus(decodedHttpResponse, methodParser));
    }
private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception,
final HttpResponse httpResponse,
final byte[] responseContent,
final Object responseDecodedContent) {
final int responseStatusCode = httpResponse.getStatusCode();
final String contentType = httpResponse.getHeaderValue("Content-Type");
final String bodyRepresentation;
if ("application/octet-stream".equalsIgnoreCase(contentType)) {
bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)";
} else {
bodyRepresentation = responseContent == null || responseContent.length == 0
? "(empty body)"
: "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\"";
}
Exception result;
try {
final Constructor<? extends HttpResponseException> exceptionConstructor =
exception.getExceptionType().getConstructor(String.class, HttpResponse.class,
exception.getExceptionBodyType());
result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation,
httpResponse,
responseDecodedContent);
} catch (ReflectiveOperationException e) {
String message = "Status code " + responseStatusCode + ", but an instance of "
+ exception.getExceptionType().getCanonicalName() + " cannot be created."
+ " Response body: " + bodyRepresentation;
result = new IOException(message, e);
}
return result;
}
    /**
     * Create a publisher that (1) emits an error if the provided response {@code decodedResponse} has a 'disallowed
     * status code' OR (2) emits the provided response if its status code is allowed.
     *
     * A 'disallowed status code' is one that is not among the expected status codes defined in the provided
     * SwaggerMethodParser.
     *
     * @param decodedResponse The HttpResponse to check.
     * @param methodParser The method parser that contains information about the service interface method that initiated
     * the HTTP request.
     * @return An async-version of the provided decodedResponse.
     */
    private Mono<HttpDecodedResponse> ensureExpectedStatus(final HttpDecodedResponse decodedResponse,
        final SwaggerMethodParser methodParser) {
        final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode();
        final Mono<HttpDecodedResponse> asyncResult;
        if (!methodParser.isExpectedResponseStatusCode(responseStatusCode)) {
            Mono<byte[]> bodyAsBytes = decodedResponse.getSourceResponse().getBodyAsByteArray();
            asyncResult = bodyAsBytes.flatMap((Function<byte[], Mono<HttpDecodedResponse>>) responseContent -> {
                // Try to decode the error body into the method's declared exception body type.
                Mono<Object> decodedErrorBody = decodedResponse.getDecodedBody(responseContent);
                return decodedErrorBody
                    .flatMap((Function<Object, Mono<HttpDecodedResponse>>) responseDecodedErrorObject -> {
                        Throwable exception =
                            instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                                decodedResponse.getSourceResponse(),
                                responseContent,
                                responseDecodedErrorObject);
                        return Mono.error(exception);
                    })
                    // Error body present but not decodable: raise the exception without decoded content.
                    .switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
                        Throwable exception =
                            instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                                decodedResponse.getSourceResponse(),
                                responseContent,
                                null);
                        return Mono.error(exception);
                    }));
            // No body at all: raise the exception with neither raw nor decoded content.
            }).switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
                Throwable exception =
                    instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
                        decodedResponse.getSourceResponse(),
                        null,
                        null);
                return Mono.error(exception);
            }));
        } else {
            asyncResult = Mono.just(decodedResponse);
        }
        return asyncResult;
    }
private Mono<?> handleRestResponseReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser,
final Type entityType) {
if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) {
final Type bodyType = TypeUtil.getRestResponseBodyType(entityType);
if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) {
return response.getSourceResponse().getBody().ignoreElements()
.then(createResponse(response, entityType, null));
} else {
return handleBodyReturnType(response, methodParser, bodyType)
.flatMap(bodyAsObject -> createResponse(response, entityType, bodyAsObject))
.switchIfEmpty(Mono.defer((Supplier<Mono<Response<?>>>) () -> createResponse(response,
entityType, null)));
}
} else {
return handleBodyReturnType(response, methodParser, entityType);
}
}
    /**
     * Constructs the concrete {@link Response} subtype declared by the service method, wrapping the decoded body.
     *
     * @param response the decoded HTTP response
     * @param entityType the declared Response (sub)type to instantiate
     * @param bodyAsObject the deserialized body to place in the response, possibly null
     * @return a Mono emitting the constructed response, or an error when no suitable constructor exists
     */
    @SuppressWarnings("unchecked")
    private Mono<Response<?>> createResponse(HttpDecodedResponse response, Type entityType, Object bodyAsObject) {
        Class<? extends Response<?>> cls = (Class<? extends Response<?>>) TypeUtil.getRawClass(entityType);
        // The interface types cannot be instantiated; substitute their concrete base implementations.
        if (cls.equals(Response.class)) {
            cls = (Class<? extends Response<?>>) (Object) ResponseBase.class;
        } else if (cls.equals(PagedResponse.class)) {
            cls = (Class<? extends Response<?>>) (Object) PagedResponseBase.class;
            // PagedResponseBase requires the body to be a Page.
            if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) {
                throw logger.logExceptionAsError(new RuntimeException(
                    "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class));
            }
        }
        // Constructor lookup goes through a cache to avoid repeated reflective resolution.
        Constructor<? extends Response<?>> ctr = this.responseConstructorsCache.get(cls);
        if (ctr != null) {
            return this.responseConstructorsCache.invoke(ctr, response, bodyAsObject);
        } else {
            return Mono.error(new RuntimeException("Cannot find suitable constructor for class " + cls));
        }
    }
    /**
     * Converts the response body to the entity type declared by the service method.
     *
     * @param response the decoded HTTP response
     * @param methodParser parser describing the invoked service method
     * @param entityType the declared body type
     * @return a Mono emitting the converted body (boolean for HEAD, byte[], streaming Flux, or decoded POJO)
     */
    private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
        final SwaggerMethodParser methodParser, final Type entityType) {
        final int responseStatusCode = response.getSourceResponse().getStatusCode();
        final HttpMethod httpMethod = methodParser.getHttpMethod();
        final Type returnValueWireType = methodParser.getReturnValueWireType();
        final Mono<?> asyncResult;
        if (httpMethod == HttpMethod.HEAD
            && (TypeUtil.isTypeOrSubTypeOf(
                entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
            // HEAD with a boolean return maps any 2xx status to true, everything else to false.
            boolean isSuccess = (responseStatusCode / 100) == 2;
            asyncResult = Mono.just(isSuccess);
        } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
            Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
            // Base64Url-encoded wire bodies are decoded before being handed back as raw bytes.
            if (returnValueWireType == Base64Url.class) {
                responseBodyBytesAsync =
                    responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
            }
            asyncResult = responseBodyBytesAsync;
        } else if (FluxUtil.isFluxByteBuffer(entityType)) {
            // Streaming return type: hand the raw body stream through without buffering.
            asyncResult = Mono.just(response.getSourceResponse().getBody());
        } else {
            asyncResult = response.getDecodedBody((byte[]) null);
        }
        return asyncResult;
    }
    /**
     * Handle the provided asynchronous HTTP response and return the deserialized value.
     *
     * @param asyncHttpDecodedResponse the asynchronous HTTP response to the original HTTP request
     * @param methodParser the SwaggerMethodParser that the request originates from
     * @param returnType the type of value that will be returned
     * @param context Additional context that is passed through the Http pipeline during the service call.
     * @return the deserialized result
     */
    private Object handleRestReturnType(final Mono<HttpDecodedResponse> asyncHttpDecodedResponse,
        final SwaggerMethodParser methodParser,
        final Type returnType,
        final Context context) {
        // Status-code validation and span completion are attached before the return-type adaptation below.
        final Mono<HttpDecodedResponse> asyncExpectedResponse =
            ensureExpectedStatus(asyncHttpDecodedResponse, methodParser)
                .doOnEach(RestProxy::endTracingSpan)
                .subscriberContext(reactor.util.context.Context.of("TRACING_CONTEXT", context));
        final Object result;
        if (TypeUtil.isTypeOrSubTypeOf(returnType, Mono.class)) {
            final Type monoTypeParam = TypeUtil.getTypeArgument(returnType);
            if (TypeUtil.isTypeOrSubTypeOf(monoTypeParam, Void.class)) {
                // Mono<Void>: complete without materializing a body.
                result = asyncExpectedResponse.then();
            } else {
                result = asyncExpectedResponse.flatMap(response ->
                    handleRestResponseReturnType(response, methodParser, monoTypeParam));
            }
        } else if (FluxUtil.isFluxByteBuffer(returnType)) {
            result = asyncExpectedResponse.flatMapMany(ar -> ar.getSourceResponse().getBody());
        } else if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType,
            Void.class)) {
            // Synchronous void return: block for completion/side effects only.
            asyncExpectedResponse.block();
            result = null;
        } else {
            // Synchronous value return: block and return the adapted body.
            result = asyncExpectedResponse
                .flatMap(httpResponse -> handleRestResponseReturnType(httpResponse, methodParser, returnType))
                .block();
        }
        return result;
    }
    /**
     * Completes the tracing span attached to the reactive stream, recording the HTTP status code and any error.
     *
     * @param signal the signal emitted by the response pipeline
     */
    private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
        if (!TracerProxy.isTracingEnabled()) {
            return;
        }
        // Only onNext and onError carry a result worth recording.
        if (signal.isOnComplete() || signal.isOnSubscribe()) {
            return;
        }
        reactor.util.context.Context context = signal.getContext();
        Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
        Optional<Boolean> disableTracing = context.getOrEmpty(Tracer.DISABLE_TRACING_KEY);
        // Nothing to end when no span was started, or tracing was explicitly disabled for this call.
        if (!tracingContext.isPresent() || (disableTracing.isPresent() && disableTracing.get())) {
            return;
        }
        int statusCode = 0;
        HttpDecodedResponse httpDecodedResponse;
        Throwable throwable = null;
        if (signal.hasValue()) {
            httpDecodedResponse = signal.get();
            statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
        } else if (signal.hasError()) {
            throwable = signal.getThrowable();
            // Service errors still carry a response whose status code should be recorded on the span.
            if (throwable instanceof HttpResponseException) {
                HttpResponseException exception = (HttpResponseException) throwable;
                statusCode = exception.getResponse().getStatusCode();
            }
        }
        TracerProxy.end(statusCode, throwable, tracingContext.get());
    }
    /**
     * Create an instance of the default serializer.
     *
     * @return the default (Jackson-based) serializer adapter
     */
    private static SerializerAdapter createDefaultSerializer() {
        return JacksonAdapter.createDefaultSerializerAdapter();
    }
    /**
     * Create the default HttpPipeline, with no credentials policy.
     *
     * @return the default HttpPipeline
     */
    private static HttpPipeline createDefaultPipeline() {
        return createDefaultPipeline(null);
    }
/**
* Create the default HttpPipeline.
*
* @param credentialsPolicy the credentials policy factory to use to apply authentication to the pipeline
* @return the default HttpPipeline
*/
private static HttpPipeline createDefaultPipeline(HttpPipelinePolicy credentialsPolicy) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy());
policies.add(new RetryPolicy());
policies.add(new CookiePolicy());
if (credentialsPolicy != null) {
policies.add(credentialsPolicy);
}
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
    /**
     * Create a proxy implementation of the provided Swagger interface.
     *
     * <p>Uses the default pipeline (user-agent, retry and cookie policies, no credentials) and the
     * default Jackson-based serializer.</p>
     *
     * @param swaggerInterface the Swagger interface to provide a proxy implementation for
     * @param <A> the type of the Swagger interface
     * @return a proxy implementation of the provided Swagger interface
     */
    public static <A> A create(Class<A> swaggerInterface) {
        return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer());
    }
    /**
     * Create a proxy implementation of the provided Swagger interface.
     *
     * <p>Uses the supplied pipeline and the default Jackson-based serializer.</p>
     *
     * @param swaggerInterface the Swagger interface to provide a proxy implementation for
     * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
     * @param <A> the type of the Swagger interface
     * @return a proxy implementation of the provided Swagger interface
     */
    public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) {
        return create(swaggerInterface, httpPipeline, createDefaultSerializer());
    }
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests
* @param serializer the serializer that will be used to convert POJOs to and from request and response bodies
* @param <A> the type of the Swagger interface.
* @return a proxy implementation of the provided Swagger interface
*/
@SuppressWarnings("unchecked")
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) {
final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer);
final RestProxy restProxy = new RestProxy(httpPipeline, serializer, interfaceParser);
return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface},
restProxy);
}
} | class RestProxy implements InvocationHandler {
private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0);
private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes.";
private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes.";
private final ClientLogger logger = new ClientLogger(RestProxy.class);
private final HttpPipeline httpPipeline;
private final SerializerAdapter serializer;
private final SwaggerInterfaceParser interfaceParser;
private final HttpResponseDecoder decoder;
private final ResponseConstructorsCache responseConstructorsCache;
    /**
     * Create a RestProxy.
     *
     * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests.
     * @param serializer the serializer that will be used to convert response bodies to POJOs.
     * @param interfaceParser the parser that contains information about the interface describing REST API methods that
     * this RestProxy "implements".
     */
    private RestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) {
        this.httpPipeline = httpPipeline;
        this.serializer = serializer;
        this.interfaceParser = interfaceParser;
        // The decoder shares the proxy's serializer so responses are deserialized consistently.
        this.decoder = new HttpResponseDecoder(this.serializer);
        this.responseConstructorsCache = new ResponseConstructorsCache();
    }
    /**
     * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this
     * RestProxy was created to "implement".
     *
     * @param method the method to get a SwaggerMethodParser for
     * @return the SwaggerMethodParser for the provided method
     */
    private SwaggerMethodParser getMethodParser(Method method) {
        // The interface parser owns per-method metadata; delegate the lookup to it.
        return interfaceParser.getMethodParser(method);
    }
    /**
     * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance.
     *
     * <p>Delegates directly to the configured pipeline; no per-call state is kept on this proxy.</p>
     *
     * @param request the HTTP request to send
     * @param contextData the context
     * @return a {@link Mono} that emits HttpResponse asynchronously
     */
    public Mono<HttpResponse> send(HttpRequest request, Context contextData) {
        return httpPipeline.send(request, contextData);
    }
    /**
     * Handles an invocation of a method on the proxied Swagger interface: builds the HTTP request from the
     * method's annotations and arguments, sends it through the pipeline, and adapts the decoded response to the
     * method's declared return type.
     *
     * @param proxy the proxy instance the method was invoked on (unused)
     * @param method the Swagger interface method that was invoked
     * @param args the arguments passed to the method
     * @return the value to return from the proxied method (synchronous value, Mono, or Flux)
     */
    @Override
    public Object invoke(Object proxy, final Method method, Object[] args) {
        try {
            if (method.isAnnotationPresent(ResumeOperation.class)) {
                throw logger.logExceptionAsError(Exceptions.propagate(
                    new Exception("The resume operation isn't supported.")));
            }
            final SwaggerMethodParser methodParser = getMethodParser(method);
            final HttpRequest request = createHttpRequest(methodParser, args);
            Context context = methodParser.setContext(args)
                .addData("caller-method", methodParser.getFullyQualifiedMethodName())
                .addData("azure-eagerly-read-response", isReturnTypeDecodable(methodParser.getReturnType()));
            context = startTracingSpan(method, context);
            // Validate the emitted body length against the Content-Length header, if a body is present.
            if (request.getBody() != null) {
                request.setBody(validateLength(request));
            }
            final Mono<HttpResponse> asyncResponse = send(request, context);
            Mono<HttpDecodedResponse> asyncDecodedResponse = this.decoder.decode(asyncResponse, methodParser);
            return handleRestReturnType(asyncDecodedResponse, methodParser, methodParser.getReturnType(), context);
        } catch (IOException e) {
            throw logger.logExceptionAsError(Exceptions.propagate(e));
        }
    }
    /**
     * Wraps the request body so the number of bytes actually emitted is validated against the declared
     * {@code Content-Length} header, erroring the stream on any mismatch.
     *
     * @param request the request whose body should be length-validated
     * @return the validated body stream, or an empty stream when the request has no body
     */
    static Flux<ByteBuffer> validateLength(final HttpRequest request) {
        final Flux<ByteBuffer> bbFlux = request.getBody();
        if (bbFlux == null) {
            return Flux.empty();
        }
        // NOTE(review): throws NumberFormatException if the Content-Length header is absent; callers
        // appear to always set it (see configRequest) — confirm.
        final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length"));
        return Flux.defer(() -> {
            // Running byte count; single-element array so the lambda below can mutate it.
            final long[] currentTotalLength = new long[1];
            // VALIDATION_BUFFER is a sentinel appended after the real body so the "too small" check
            // runs exactly once, after every data buffer has been counted.
            return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> {
                if (buffer == null) {
                    return;
                }
                if (buffer == VALIDATION_BUFFER) {
                    if (expectedLength != currentTotalLength[0]) {
                        sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL,
                            currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                    } else {
                        sink.complete();
                    }
                    return;
                }
                currentTotalLength[0] += buffer.remaining();
                // Fail fast as soon as the body exceeds the declared length.
                if (currentTotalLength[0] > expectedLength) {
                    sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
                        currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength));
                    return;
                }
                sink.next(buffer);
            });
        });
    }
/**
* Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing
* additional context information.
* @param method Service method being called.
* @param context Context information about the current service call.
*
* @return The updated context containing the span context.
*/
/**
* Create a HttpRequest for the provided Swagger method using the provided arguments.
*
* @param methodParser the Swagger method parser to use
* @param args the arguments to use to populate the method's annotation values
* @return a HttpRequest
* @throws IOException thrown if the body contents cannot be serialized
*/
private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException {
final String path = methodParser.setPath(args);
final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path);
final UrlBuilder urlBuilder;
if (pathUrlBuilder.getScheme() != null) {
urlBuilder = pathUrlBuilder;
} else {
urlBuilder = new UrlBuilder();
methodParser.setSchemeAndHost(args, urlBuilder);
if (path != null && !path.isEmpty() && !"/".equals(path)) {
String hostPath = urlBuilder.getPath();
if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(":
urlBuilder.setPath(path);
} else {
urlBuilder.setPath(hostPath + "/" + path);
}
}
}
methodParser.setEncodedQueryParameters(args, urlBuilder);
final URL url = urlBuilder.toUrl();
final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url),
methodParser, args);
HttpHeaders httpHeaders = request.getHeaders();
methodParser.setHeaders(args, httpHeaders);
return request;
}
@SuppressWarnings("unchecked")
private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser,
final Object[] args) throws IOException {
final Object bodyContentObject = methodParser.setBody(args);
if (bodyContentObject == null) {
request.getHeaders().set("Content-Length", "0");
} else {
String contentType = methodParser.getBodyContentType();
if (contentType == null || contentType.isEmpty()) {
if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) {
contentType = ContentType.APPLICATION_OCTET_STREAM;
} else {
contentType = ContentType.APPLICATION_JSON;
}
}
request.getHeaders().set("Content-Type", contentType);
boolean isJson = false;
final String[] contentTypeParts = contentType.split(";");
for (final String contentTypePart : contentTypeParts) {
if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) {
isJson = true;
break;
}
}
if (isJson) {
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream);
request.setHeader("Content-Length", String.valueOf(stream.size()));
request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
} else if (FluxUtil.isFluxByteBuffer(methodParser.getBodyJavaType())) {
request.setBody((Flux<ByteBuffer>) bodyContentObject);
} else if (bodyContentObject instanceof byte[]) {
request.setBody((byte[]) bodyContentObject);
} else if (bodyContentObject instanceof String) {
final String bodyContentString = (String) bodyContentObject;
if (!bodyContentString.isEmpty()) {
request.setBody(bodyContentString);
}
} else if (bodyContentObject instanceof ByteBuffer) {
request.setBody(Flux.just((ByteBuffer) bodyContentObject));
} else {
ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream();
serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream);
request.setHeader("Content-Length", String.valueOf(stream.size()));
request.setBody(Flux.defer(() -> Flux.just(ByteBuffer.wrap(stream.toByteArray(), 0, stream.size()))));
}
}
return request;
}
    /**
     * Async overload: validates each emitted decoded response's status code, delegating to the
     * single-response {@code ensureExpectedStatus} overload.
     *
     * @param asyncDecodedResponse the decoded response to validate
     * @param methodParser parser describing the expected status codes for the invoked method
     * @return the response, or an error for a disallowed status code
     */
    private Mono<HttpDecodedResponse> ensureExpectedStatus(final Mono<HttpDecodedResponse> asyncDecodedResponse,
        final SwaggerMethodParser methodParser) {
        return asyncDecodedResponse
            .flatMap(decodedHttpResponse -> ensureExpectedStatus(decodedHttpResponse, methodParser));
    }
private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception,
final HttpResponse httpResponse,
final byte[] responseContent,
final Object responseDecodedContent) {
final int responseStatusCode = httpResponse.getStatusCode();
final String contentType = httpResponse.getHeaderValue("Content-Type");
final String bodyRepresentation;
if ("application/octet-stream".equalsIgnoreCase(contentType)) {
bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)";
} else {
bodyRepresentation = responseContent == null || responseContent.length == 0
? "(empty body)"
: "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\"";
}
Exception result;
try {
final Constructor<? extends HttpResponseException> exceptionConstructor =
exception.getExceptionType().getConstructor(String.class, HttpResponse.class,
exception.getExceptionBodyType());
result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation,
httpResponse,
responseDecodedContent);
} catch (ReflectiveOperationException e) {
String message = "Status code " + responseStatusCode + ", but an instance of "
+ exception.getExceptionType().getCanonicalName() + " cannot be created."
+ " Response body: " + bodyRepresentation;
result = new IOException(message, e);
}
return result;
}
/**
* Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status
* code' OR (2) emits provided response if it's status code ia allowed.
*
* 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[]
* of additional allowed status codes.
*
* @param decodedResponse The HttpResponse to check.
* @param methodParser The method parser that contains information about the service interface method that initiated
* the HTTP request.
* @return An async-version of the provided decodedResponse.
*/
private Mono<HttpDecodedResponse> ensureExpectedStatus(final HttpDecodedResponse decodedResponse,
final SwaggerMethodParser methodParser) {
final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode();
final Mono<HttpDecodedResponse> asyncResult;
if (!methodParser.isExpectedResponseStatusCode(responseStatusCode)) {
Mono<byte[]> bodyAsBytes = decodedResponse.getSourceResponse().getBodyAsByteArray();
asyncResult = bodyAsBytes.flatMap((Function<byte[], Mono<HttpDecodedResponse>>) responseContent -> {
Mono<Object> decodedErrorBody = decodedResponse.getDecodedBody(responseContent);
return decodedErrorBody
.flatMap((Function<Object, Mono<HttpDecodedResponse>>) responseDecodedErrorObject -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
responseContent,
responseDecodedErrorObject);
return Mono.error(exception);
})
.switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
responseContent,
null);
return Mono.error(exception);
}));
}).switchIfEmpty(Mono.defer((Supplier<Mono<HttpDecodedResponse>>) () -> {
Throwable exception =
instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode),
decodedResponse.getSourceResponse(),
null,
null);
return Mono.error(exception);
}));
} else {
asyncResult = Mono.just(decodedResponse);
}
return asyncResult;
}
private Mono<?> handleRestResponseReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser,
final Type entityType) {
if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) {
final Type bodyType = TypeUtil.getRestResponseBodyType(entityType);
if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) {
return response.getSourceResponse().getBody().ignoreElements()
.then(createResponse(response, entityType, null));
} else {
return handleBodyReturnType(response, methodParser, bodyType)
.flatMap(bodyAsObject -> createResponse(response, entityType, bodyAsObject))
.switchIfEmpty(Mono.defer((Supplier<Mono<Response<?>>>) () -> createResponse(response,
entityType, null)));
}
} else {
return handleBodyReturnType(response, methodParser, entityType);
}
}
@SuppressWarnings("unchecked")
private Mono<Response<?>> createResponse(HttpDecodedResponse response, Type entityType, Object bodyAsObject) {
Class<? extends Response<?>> cls = (Class<? extends Response<?>>) TypeUtil.getRawClass(entityType);
if (cls.equals(Response.class)) {
cls = (Class<? extends Response<?>>) (Object) ResponseBase.class;
} else if (cls.equals(PagedResponse.class)) {
cls = (Class<? extends Response<?>>) (Object) PagedResponseBase.class;
if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) {
throw logger.logExceptionAsError(new RuntimeException(
"Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class));
}
}
Constructor<? extends Response<?>> ctr = this.responseConstructorsCache.get(cls);
if (ctr != null) {
return this.responseConstructorsCache.invoke(ctr, response, bodyAsObject);
} else {
return Mono.error(new RuntimeException("Cannot find suitable constructor for class " + cls));
}
}
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
}
/**
* Handle the provided asynchronous HTTP response and return the deserialized value.
*
* @param asyncHttpDecodedResponse the asynchronous HTTP response to the original HTTP request
* @param methodParser the SwaggerMethodParser that the request originates from
* @param returnType the type of value that will be returned
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return the deserialized result
*/
private Object handleRestReturnType(final Mono<HttpDecodedResponse> asyncHttpDecodedResponse,
final SwaggerMethodParser methodParser,
final Type returnType,
final Context context) {
final Mono<HttpDecodedResponse> asyncExpectedResponse =
ensureExpectedStatus(asyncHttpDecodedResponse, methodParser)
.doOnEach(RestProxy::endTracingSpan)
.subscriberContext(reactor.util.context.Context.of("TRACING_CONTEXT", context));
final Object result;
if (TypeUtil.isTypeOrSubTypeOf(returnType, Mono.class)) {
final Type monoTypeParam = TypeUtil.getTypeArgument(returnType);
if (TypeUtil.isTypeOrSubTypeOf(monoTypeParam, Void.class)) {
result = asyncExpectedResponse.then();
} else {
result = asyncExpectedResponse.flatMap(response ->
handleRestResponseReturnType(response, methodParser, monoTypeParam));
}
} else if (FluxUtil.isFluxByteBuffer(returnType)) {
result = asyncExpectedResponse.flatMapMany(ar -> ar.getSourceResponse().getBody());
} else if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType,
Void.class)) {
asyncExpectedResponse.block();
result = null;
} else {
result = asyncExpectedResponse
.flatMap(httpResponse -> handleRestResponseReturnType(httpResponse, methodParser, returnType))
.block();
}
return result;
}
private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
if (!TracerProxy.isTracingEnabled()) {
return;
}
if (signal.isOnComplete() || signal.isOnSubscribe()) {
return;
}
reactor.util.context.Context context = signal.getContext();
Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
boolean disableTracing = context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false);
if (!tracingContext.isPresent() || disableTracing) {
return;
}
int statusCode = 0;
HttpDecodedResponse httpDecodedResponse;
Throwable throwable = null;
if (signal.hasValue()) {
httpDecodedResponse = signal.get();
statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
} else if (signal.hasError()) {
throwable = signal.getThrowable();
if (throwable instanceof HttpResponseException) {
HttpResponseException exception = (HttpResponseException) throwable;
statusCode = exception.getResponse().getStatusCode();
}
}
TracerProxy.end(statusCode, throwable, tracingContext.get());
}
/**
* Create an instance of the default serializer.
*
* @return the default serializer
*/
private static SerializerAdapter createDefaultSerializer() {
return JacksonAdapter.createDefaultSerializerAdapter();
}
/**
* Create the default HttpPipeline.
*
* @return the default HttpPipeline
*/
private static HttpPipeline createDefaultPipeline() {
return createDefaultPipeline(null);
}
/**
* Create the default HttpPipeline.
*
* @param credentialsPolicy the credentials policy factory to use to apply authentication to the pipeline
* @return the default HttpPipeline
*/
private static HttpPipeline createDefaultPipeline(HttpPipelinePolicy credentialsPolicy) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy());
policies.add(new RetryPolicy());
policies.add(new CookiePolicy());
if (credentialsPolicy != null) {
policies.add(credentialsPolicy);
}
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param <A> the type of the Swagger interface
* @return a proxy implementation of the provided Swagger interface
*/
public static <A> A create(Class<A> swaggerInterface) {
return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer());
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests
* @param <A> the type of the Swagger interface
* @return a proxy implementation of the provided Swagger interface
*/
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) {
return create(swaggerInterface, httpPipeline, createDefaultSerializer());
}
/**
* Create a proxy implementation of the provided Swagger interface.
*
* @param swaggerInterface the Swagger interface to provide a proxy implementation for
* @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests
* @param serializer the serializer that will be used to convert POJOs to and from request and response bodies
* @param <A> the type of the Swagger interface.
* @return a proxy implementation of the provided Swagger interface
*/
@SuppressWarnings("unchecked")
public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) {
final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer);
final RestProxy restProxy = new RestProxy(httpPipeline, serializer, interfaceParser);
return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface},
restProxy);
}
} |
Done. | private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
if (!TracerProxy.isTracingEnabled()) {
return;
}
if (signal.isOnComplete() || signal.isOnSubscribe()) {
return;
}
reactor.util.context.Context context = signal.getContext();
Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
Optional<Boolean> disableTracing = context.getOrEmpty(Tracer.DISABLE_TRACING_KEY);
if (!tracingContext.isPresent() || (disableTracing.isPresent() && disableTracing.get())) {
return;
}
int statusCode = 0;
HttpDecodedResponse httpDecodedResponse;
Throwable throwable = null;
if (signal.hasValue()) {
httpDecodedResponse = signal.get();
statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
} else if (signal.hasError()) {
throwable = signal.getThrowable();
if (throwable instanceof HttpResponseException) {
HttpResponseException exception = (HttpResponseException) throwable;
statusCode = exception.getResponse().getStatusCode();
}
}
TracerProxy.end(statusCode, throwable, tracingContext.get());
} | Optional<Boolean> disableTracing = context.getOrEmpty(Tracer.DISABLE_TRACING_KEY); | private static void endTracingSpan(Signal<HttpDecodedResponse> signal) {
if (!TracerProxy.isTracingEnabled()) {
return;
}
if (signal.isOnComplete() || signal.isOnSubscribe()) {
return;
}
reactor.util.context.Context context = signal.getContext();
Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT");
boolean disableTracing = context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false);
if (!tracingContext.isPresent() || disableTracing) {
return;
}
int statusCode = 0;
HttpDecodedResponse httpDecodedResponse;
Throwable throwable = null;
if (signal.hasValue()) {
httpDecodedResponse = signal.get();
statusCode = httpDecodedResponse.getSourceResponse().getStatusCode();
} else if (signal.hasError()) {
throwable = signal.getThrowable();
if (throwable instanceof HttpResponseException) {
HttpResponseException exception = (HttpResponseException) throwable;
statusCode = exception.getResponse().getStatusCode();
}
}
TracerProxy.end(statusCode, throwable, tracingContext.get());
} | class " + cls));
}
}
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
} | class " + cls));
}
}
private Mono<?> handleBodyReturnType(final HttpDecodedResponse response,
final SwaggerMethodParser methodParser, final Type entityType) {
final int responseStatusCode = response.getSourceResponse().getStatusCode();
final HttpMethod httpMethod = methodParser.getHttpMethod();
final Type returnValueWireType = methodParser.getReturnValueWireType();
final Mono<?> asyncResult;
if (httpMethod == HttpMethod.HEAD
&& (TypeUtil.isTypeOrSubTypeOf(
entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) {
boolean isSuccess = (responseStatusCode / 100) == 2;
asyncResult = Mono.just(isSuccess);
} else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) {
Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray();
if (returnValueWireType == Base64Url.class) {
responseBodyBytesAsync =
responseBodyBytesAsync.map(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes());
}
asyncResult = responseBodyBytesAsync;
} else if (FluxUtil.isFluxByteBuffer(entityType)) {
asyncResult = Mono.just(response.getSourceResponse().getBody());
} else {
asyncResult = response.getDecodedBody((byte[]) null);
}
return asyncResult;
} |
existing bug in track2 (track1 is correct) | private Mono<VirtualMachineInner> retrieveVirtualMachineAsync() {
return computeManager.serviceClient().getVirtualMachines()
.getByResourceGroupAsync(rgName, vmName, InstanceViewTypes.INSTANCE_VIEW);
} | .getByResourceGroupAsync(rgName, vmName, InstanceViewTypes.INSTANCE_VIEW); | private Mono<VirtualMachineInner> retrieveVirtualMachineAsync() {
return computeManager.serviceClient().getVirtualMachines()
.getByResourceGroupAsync(rgName, vmName, InstanceViewTypes.INSTANCE_VIEW);
} | class LinuxDiskVolumeNoAADEncryptionMonitorImpl implements DiskVolumeEncryptionMonitor {
private final String rgName;
private final String vmName;
private final ComputeManager computeManager;
private VirtualMachineInner virtualMachine;
private VirtualMachineExtensionInstanceView extensionInstanceView;
/**
* Creates LinuxDiskVolumeNoAADEncryptionMonitorImpl.
*
* @param virtualMachineId resource id of Linux virtual machine to retrieve encryption status from
* @param computeManager compute manager
*/
LinuxDiskVolumeNoAADEncryptionMonitorImpl(String virtualMachineId, ComputeManager computeManager) {
this.rgName = ResourceUtils.groupFromResourceId(virtualMachineId);
this.vmName = ResourceUtils.nameFromResourceId(virtualMachineId);
this.computeManager = computeManager;
}
@Override
public OperatingSystemTypes osType() {
return OperatingSystemTypes.LINUX;
}
@Override
public String progressMessage() {
if (!hasEncryptionExtensionInstanceView()) {
return null;
}
return LinuxEncryptionExtensionUtil.progressMessage(this.extensionInstanceView);
}
@Override
public EncryptionStatus osDiskStatus() {
if (!hasEncryptionExtensionInstanceView()) {
return EncryptionStatus.NOT_ENCRYPTED;
}
return LinuxEncryptionExtensionUtil.osDiskStatus(this.extensionInstanceView);
}
@Override
public EncryptionStatus dataDiskStatus() {
if (!hasEncryptionExtensionInstanceView()) {
return EncryptionStatus.NOT_ENCRYPTED;
}
return LinuxEncryptionExtensionUtil.dataDiskStatus(this.extensionInstanceView);
}
@Override
public Map<String, InstanceViewStatus> diskInstanceViewEncryptionStatuses() {
if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
return new HashMap<>();
}
HashMap<String, InstanceViewStatus> div = new HashMap<String, InstanceViewStatus>();
for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
for (InstanceViewStatus status : diskInstanceView.statuses()) {
if (encryptionStatusFromCode(status.code()) != null) {
div.put(diskInstanceView.name(), status);
break;
}
}
}
return div;
}
@Override
public DiskVolumeEncryptionMonitor refresh() {
return refreshAsync().block();
}
@Override
public Mono<DiskVolumeEncryptionMonitor> refreshAsync() {
final LinuxDiskVolumeNoAADEncryptionMonitorImpl self = this;
return retrieveVirtualMachineAsync()
.flatMap(
virtualMachine -> {
self.virtualMachine = virtualMachine;
if (virtualMachine.instanceView() != null && virtualMachine.instanceView().extensions() != null) {
for (VirtualMachineExtensionInstanceView eiv : virtualMachine.instanceView().extensions()) {
if (eiv.type() != null
&& eiv
.type()
.toLowerCase(Locale.ROOT)
.startsWith(EncryptionExtensionIdentifier.publisherName().toLowerCase(Locale.ROOT))
&& eiv.name() != null
&& EncryptionExtensionIdentifier.isEncryptionTypeName(eiv.name(), osType())) {
self.extensionInstanceView = eiv;
break;
}
}
}
return Mono.just(self);
});
}
/**
* Retrieve the virtual machine. If the virtual machine does not exists then an error observable will be returned.
*
* @return the retrieved virtual machine
*/
private boolean hasEncryptionExtensionInstanceView() {
return this.extensionInstanceView != null;
}
/**
* Given disk instance view status code, check whether it is encryption status code if yes map it to
* EncryptionStatus.
*
* @param code the encryption status code
* @return mapped EncryptionStatus if given code is encryption status code, null otherwise.
*/
private static EncryptionStatus encryptionStatusFromCode(String code) {
if (code != null && code.toLowerCase(Locale.ROOT).startsWith("encryptionstate")) {
String[] parts = code.split("/", 2);
if (parts.length != 2) {
return EncryptionStatus.UNKNOWN;
} else {
return EncryptionStatus.fromString(parts[1]);
}
}
return null;
}
} | class LinuxDiskVolumeNoAADEncryptionMonitorImpl implements DiskVolumeEncryptionMonitor {
private final String rgName;
private final String vmName;
private final ComputeManager computeManager;
private VirtualMachineInner virtualMachine;
private VirtualMachineExtensionInstanceView extensionInstanceView;
/**
* Creates LinuxDiskVolumeNoAADEncryptionMonitorImpl.
*
* @param virtualMachineId resource id of Linux virtual machine to retrieve encryption status from
* @param computeManager compute manager
*/
LinuxDiskVolumeNoAADEncryptionMonitorImpl(String virtualMachineId, ComputeManager computeManager) {
this.rgName = ResourceUtils.groupFromResourceId(virtualMachineId);
this.vmName = ResourceUtils.nameFromResourceId(virtualMachineId);
this.computeManager = computeManager;
}
@Override
public OperatingSystemTypes osType() {
return OperatingSystemTypes.LINUX;
}
@Override
public String progressMessage() {
if (!hasEncryptionExtensionInstanceView()) {
return null;
}
return LinuxEncryptionExtensionUtil.progressMessage(this.extensionInstanceView);
}
@Override
public EncryptionStatus osDiskStatus() {
if (!hasEncryptionExtensionInstanceView()) {
return EncryptionStatus.NOT_ENCRYPTED;
}
return LinuxEncryptionExtensionUtil.osDiskStatus(this.extensionInstanceView);
}
@Override
public EncryptionStatus dataDiskStatus() {
if (!hasEncryptionExtensionInstanceView()) {
return EncryptionStatus.NOT_ENCRYPTED;
}
return LinuxEncryptionExtensionUtil.dataDiskStatus(this.extensionInstanceView);
}
@Override
public Map<String, InstanceViewStatus> diskInstanceViewEncryptionStatuses() {
if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
return new HashMap<>();
}
HashMap<String, InstanceViewStatus> div = new HashMap<String, InstanceViewStatus>();
for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
for (InstanceViewStatus status : diskInstanceView.statuses()) {
if (encryptionStatusFromCode(status.code()) != null) {
div.put(diskInstanceView.name(), status);
break;
}
}
}
return div;
}
@Override
public DiskVolumeEncryptionMonitor refresh() {
return refreshAsync().block();
}
@Override
public Mono<DiskVolumeEncryptionMonitor> refreshAsync() {
final LinuxDiskVolumeNoAADEncryptionMonitorImpl self = this;
return retrieveVirtualMachineAsync()
.flatMap(
virtualMachine -> {
self.virtualMachine = virtualMachine;
if (virtualMachine.instanceView() != null && virtualMachine.instanceView().extensions() != null) {
for (VirtualMachineExtensionInstanceView eiv : virtualMachine.instanceView().extensions()) {
if (eiv.type() != null
&& eiv
.type()
.toLowerCase(Locale.ROOT)
.startsWith(EncryptionExtensionIdentifier.publisherName().toLowerCase(Locale.ROOT))
&& eiv.name() != null
&& EncryptionExtensionIdentifier.isEncryptionTypeName(eiv.name(), osType())) {
self.extensionInstanceView = eiv;
break;
}
}
}
return Mono.just(self);
});
}
/**
* Retrieve the virtual machine. If the virtual machine does not exists then an error observable will be returned.
*
* @return the retrieved virtual machine
*/
private boolean hasEncryptionExtensionInstanceView() {
return this.extensionInstanceView != null;
}
/**
* Given disk instance view status code, check whether it is encryption status code if yes map it to
* EncryptionStatus.
*
* @param code the encryption status code
* @return mapped EncryptionStatus if given code is encryption status code, null otherwise.
*/
private static EncryptionStatus encryptionStatusFromCode(String code) {
if (code != null && code.toLowerCase(Locale.ROOT).startsWith("encryptionstate")) {
String[] parts = code.split("/", 2);
if (parts.length != 2) {
return EncryptionStatus.UNKNOWN;
} else {
return EncryptionStatus.fromString(parts[1]);
}
}
return null;
}
} |
@mssfang sorry I've been trying to parse the java code, but can't read it well enough :'(. Are the errors inserted into the main getter, i.e. if the second PII action result is an error, would getting the PII action results mean [GoodResponse, BadRespone]? Or are they returned in a separate getter `getActionError`. | public void analyzeBatchActionsAllFailed(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsAllFailedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(
getExpectedRecognizeEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, ENTITY_TASK, "0")))),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "1")))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, KEY_PHRASES_TASK, "0")))))),
result.toStream().collect(Collectors.toList()));
}
);
} | getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null, | public void analyzeBatchActionsAllFailed(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsAllFailedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(
getExpectedRecognizeEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, ENTITY_TASK, "0")))),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "1")))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, KEY_PHRASES_TASK, "0")))))),
result.toStream().collect(Collectors.toList()));
}
);
} | class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsAsyncClient client;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getUnknownDetectedLanguage(), response))
.verifyComplete());
}
/**
* Verifies that a bad request exception is returned for input documents with same ids.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(response -> validateCategorizedEntities(getCategorizedEntitiesList1(),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
.assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) ->
StepVerifier.create(client.recognizePiiEntities(document, "en", options))
.assertNext(response -> validatePiiEntities(asList(getPiiEntitiesList1().get(1)),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(response -> assertEquals("monde", response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesDuplicateIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesStringInputRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesWarningRunner(
input -> StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(keyPhrasesResult -> {
keyPhrasesResult.getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
});
})
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesBatchWarningRunner(
inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
/**
* Test analyzing sentiment for a string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input with default language hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input, null))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input and verifying the result of opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) ->
StepVerifier.create(client.analyzeSentiment(input, "en", options))
.assertNext(response -> validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
/**
* Test analyzing sentiment for a faulty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input -> {
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
TextSentiment.NEUTRAL,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(
new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
new SentenceSentiment("@
)), null);
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, expectedDocumentSentiment, response))
.verifyComplete();
});
}
/**
* Test analyzing sentiment for a duplicate ID list.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentDuplicateIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
* code, 'en'.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentStringInputRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and given a language code.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and given a language code.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which to show the request statistics only and verify the analyzed sentiment result.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that we can get statistics on the collection result when given a batch of
* TextDocumentInput documents with TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions includes request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes request statistics but not opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining but not request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response ->
validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForSinglePage(),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
StepVerifier.create(client.beginAnalyzeHealthcare(documents, null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPaginationRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>>
syncPoller = client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
result.toStream().collect(Collectors.toList()));
}, 22);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) ->
StepVerifier.create(client.beginAnalyzeBatchActions(documents,
new TextAnalyticsActions()
.setRecognizeEntitiesOptions(new RecognizeEntitiesOptions()), null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeBatchActionsPartialCompleted(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPartialCompletedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(Collections.emptyList()),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
}
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
}
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase {
// Async client under test; re-created in each test from the parameterized HttpClient/service version.
private TextAnalyticsAsyncClient client;
@BeforeAll
static void beforeAll() {
    // Cap every StepVerifier so a hung publisher fails the test instead of blocking the suite.
    StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
@AfterAll
static void afterAll() {
    // Restore the global StepVerifier timeout for other test classes.
    StepVerifier.resetDefaultTimeout();
}
// Builds an async client via the shared test builder for the given transport and service version.
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getUnknownDetectedLanguage(), response))
.verifyComplete());
}
/**
* Verifies that a bad request exception is returned for input documents with same ids.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(response -> validateCategorizedEntities(getCategorizedEntitiesList1(),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
.assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) ->
StepVerifier.create(client.recognizePiiEntities(document, "en", options))
.assertNext(response -> validatePiiEntities(asList(getPiiEntitiesList1().get(1)),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(response -> assertEquals("monde", response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesDuplicateIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesStringInputRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesWarningRunner(
input -> StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(keyPhrasesResult -> {
keyPhrasesResult.getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
});
})
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesBatchWarningRunner(
inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
/**
* Test analyzing sentiment for a string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input with default language hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input, null))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input and verifying the result of opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) ->
StepVerifier.create(client.analyzeSentiment(input, "en", options))
.assertNext(response -> validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
/**
* Test analyzing sentiment for a faulty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input -> {
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
TextSentiment.NEUTRAL,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(
new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
new SentenceSentiment("@
)), null);
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, expectedDocumentSentiment, response))
.verifyComplete();
});
}
/**
* Test analyzing sentiment for a duplicate ID list.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentDuplicateIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
* code, 'en'.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentStringInputRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and given a language code.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and given a language code.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which to show the request statistics only and verify the analyzed sentiment result.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that we can get statistics on the collection result when given a batch of
* TextDocumentInput documents with TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions includes request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes request statistics but not opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining but not request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response ->
validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForSinglePage(),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
StepVerifier.create(client.beginAnalyzeHealthcare(documents, null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPaginationRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>>
syncPoller = client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
result.toStream().collect(Collectors.toList()));
}, 22);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) ->
StepVerifier.create(client.beginAnalyzeBatchActions(documents,
new TextAnalyticsActions()
.setRecognizeEntitiesOptions(new RecognizeEntitiesOptions()), null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeBatchActionsPartialCompleted(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPartialCompletedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(Collections.emptyList()),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
}
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
} |
Yes, The response will be [GoodResponse, BadResponse] that maps to the input action order. Each actionResult could be successful action result or action error. | public void analyzeBatchActionsAllFailed(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsAllFailedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(
getExpectedRecognizeEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, ENTITY_TASK, "0")))),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "1")))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, KEY_PHRASES_TASK, "0")))))),
result.toStream().collect(Collectors.toList()));
}
);
} | getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null, | public void analyzeBatchActionsAllFailed(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsAllFailedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(
getExpectedRecognizeEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, ENTITY_TASK, "0")))),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "1")))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, KEY_PHRASES_TASK, "0")))))),
result.toStream().collect(Collectors.toList()));
}
);
} | class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsAsyncClient client;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getUnknownDetectedLanguage(), response))
.verifyComplete());
}
/**
* Verifies that a bad request exception is returned for input documents with same ids.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(response -> validateCategorizedEntities(getCategorizedEntitiesList1(),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
.assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) ->
StepVerifier.create(client.recognizePiiEntities(document, "en", options))
.assertNext(response -> validatePiiEntities(asList(getPiiEntitiesList1().get(1)),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(response -> assertEquals("monde", response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesDuplicateIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesStringInputRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesWarningRunner(
input -> StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(keyPhrasesResult -> {
keyPhrasesResult.getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
});
})
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesBatchWarningRunner(
inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
/**
* Test analyzing sentiment for a string input.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input with default language hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input, null))
.assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
* Test analyzing sentiment for a string input and verifying the result of opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) ->
StepVerifier.create(client.analyzeSentiment(input, "en", options))
.assertNext(response -> validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
/**
* Test analyzing sentiment for a faulty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input -> {
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
TextSentiment.NEUTRAL,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(
new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
new SentenceSentiment("@
)), null);
StepVerifier.create(client.analyzeSentiment(input))
.assertNext(response -> validateAnalyzedSentiment(false, expectedDocumentSentiment, response))
.verifyComplete();
});
}
/**
* Test analyzing sentiment for a duplicate ID list.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentDuplicateIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
* code, 'en'.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentStringInputRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* String documents with null TextAnalyticsRequestOptions and given a language code.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null and given a language code.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeSentimentLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which to show the request statistics only and verify the analyzed sentiment result.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* String documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that we can get statistics on the collection result when given a batch of
* TextDocumentInput documents with TextAnalyticsRequestOptions.
*
* {@link TextAnalyticsAsyncClient
* which TextAnalyticsRequestOptions includes request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result excludes request statistics and mined options when given a batch of
* TextDocumentInput documents with null AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions is null.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
.assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes request statistics but not mined options when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes request statistics but not opinion mining.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verify that the collection result includes mined options but not request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining but not request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response ->
validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete();
});
}
/**
* Verify that the collection result includes mined options and request statistics when given a batch of
* TextDocumentInput documents with AnalyzeSentimentOptions.
*
* {@link TextAnalyticsAsyncClient
* which AnalyzeSentimentOptions includes opinion mining and request statistics.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response -> validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
* Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(0, sentenceSentiment.getOffset());
})).verifyComplete(), SENTIMENT_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForSinglePage(),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
= syncPoller.getFinalResult();
validateHealthcareTaskResult(
options.isIncludeStatistics(),
getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) -> {
StepVerifier.create(client.beginAnalyzeHealthcare(documents, null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
});
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPaginationRunner((documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>>
syncPoller = client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
result.toStream().collect(Collectors.toList()));
}, 22);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyListRunner((documents, errorMessage) ->
StepVerifier.create(client.beginAnalyzeBatchActions(documents,
new TextAnalyticsActions()
.setRecognizeEntitiesOptions(new RecognizeEntitiesOptions()), null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeBatchActionsPartialCompleted(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPartialCompletedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(Collections.emptyList()),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
}
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
} | class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsAsyncClient client;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
return getTextAnalyticsAsyncClientBuilder(httpClient, serviceVersion).buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getUnknownDetectedLanguage(), response))
.verifyComplete());
}
/**
* Verifies that a bad request exception is returned for input documents with same ids.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageDuplicateIdRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageInputEmptyIdRunner(inputs ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(response -> validateCategorizedEntities(getCategorizedEntitiesList1(),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntityStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(22, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(30, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(14, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(126, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiSingleDocumentRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
.assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(17, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(25, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(9, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(121, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiDomainFilterRunner((document, options) ->
StepVerifier.create(client.recognizePiiEntities(document, "en", options))
.assertNext(response -> validatePiiEntities(asList(getPiiEntitiesList1().get(1)),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
faultyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityDuplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
textAnalyticsInputEmptyIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
extractKeyPhrasesForSingleTextInputRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(response -> assertEquals("monde", response.iterator().next()))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
// A "faulty" (nonsense) document still succeeds and produces no warnings.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    faultyTextRunner(input ->
        StepVerifier.create(client.extractKeyPhrases(input))
            .assertNext(result -> assertFalse(result.getWarnings().iterator().hasNext()))
            .verifyComplete());
}
// Duplicate document IDs in a batch must be rejected with an HttpResponseException.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractBatchKeyPhrasesDuplicateIdRunner(inputs ->
        StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
// An empty document ID must yield HTTP 400 with the INVALID_DOCUMENT error code in the response body.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    textAnalyticsInputEmptyIdRunner(inputs ->
        StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
            }));
}
// Batch extraction (TextDocumentInput, null options): validates the full result collection, no statistics.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractBatchKeyPhrasesRunner((inputs) ->
        StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
            .assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
            .verifyComplete());
}
// Batch extraction with options requesting statistics: validates statistics are present in the response.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
            .assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
            .verifyComplete());
}
// Batch extraction from plain String documents with null language and null options.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractKeyPhrasesStringInputRunner((inputs) ->
        StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
            .assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
            .verifyComplete());
}
// Batch extraction from String documents with an explicit language hint supplied by the runner.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractKeyPhrasesLanguageHintRunner((inputs, language) ->
        StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
            .assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
            .verifyComplete());
}
// Batch extraction from String documents with options that request statistics (validated as `true`).
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
            .assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
            .verifyComplete());
}
// Over-long words in the document should surface the LONG_WORDS_IN_DOCUMENT warning on the result.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractKeyPhrasesWarningRunner(
        input -> StepVerifier.create(client.extractKeyPhrases(input))
            .assertNext(keyPhrasesResult -> {
                keyPhrasesResult.getWarnings().forEach(warning -> {
                    assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
                    assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
                });
            })
            .verifyComplete()
    );
}
// Same warning expectation as extractKeyPhrasesWarning, but across every result in a batch response.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    extractKeyPhrasesBatchWarningRunner(
        inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
            .assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
                keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
                    assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
                    assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
                })
            ))
            .verifyComplete()
    );
}
// Exceeding the per-request document limit must yield HTTP 400 with INVALID_DOCUMENT_BATCH.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    tooManyDocumentsRunner(inputs ->
        StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Test analyzing sentiment for a string input.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeSentimentForSingleTextInputRunner(input ->
        StepVerifier.create(client.analyzeSentiment(input))
            .assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
            .verifyComplete()
    );
}
/**
 * Test analyzing sentiment for a string input with default language hint (null language parameter).
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeSentimentForSingleTextInputRunner(input ->
        StepVerifier.create(client.analyzeSentiment(input, null))
            .assertNext(response -> validateAnalyzedSentiment(false, getExpectedDocumentSentiment(), response))
            .verifyComplete()
    );
}
/**
 * Test analyzing sentiment for a string input and verifying the result of opinion mining.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) ->
        StepVerifier.create(client.analyzeSentiment(input, "en", options))
            .assertNext(response -> validateAnalyzedSentiment(true, getExpectedDocumentSentiment(), response))
            .verifyComplete());
}
/**
 * Verifies that an TextAnalyticsException is thrown for an empty document.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emptyTextRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
                && INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
            .verify()
    );
}
/**
 * Test analyzing sentiment for a faulty document.
 */
// NOTE(review): the @MethodSource string literal AND the second SentenceSentiment literal below are
// truncated in this extract — TODO restore from the original file before compiling.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForFaultyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    faultyTextRunner(input -> {
        // Expected: neutral document sentiment with zeroed confidence scores for both sentences.
        final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(
            TextSentiment.NEUTRAL,
            new SentimentConfidenceScores(0.0, 0.0, 0.0),
            new IterableStream<>(asList(
                new SentenceSentiment("!", TextSentiment.NEUTRAL, new SentimentConfidenceScores(0.0, 0.0, 0.0), null, 0),
                new SentenceSentiment("@
            )), null);
        StepVerifier.create(client.analyzeSentiment(input))
            .assertNext(response -> validateAnalyzedSentiment(false, expectedDocumentSentiment, response))
            .verifyComplete();
    });
}
/**
 * Test analyzing sentiment for a duplicate ID list.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentDuplicateIdRunner(inputs ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions()))
            .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that an invalid document exception is returned for input documents with an empty ID.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    textAnalyticsInputEmptyIdRunner(inputs ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verify that the collection result excludes request statistics and mined options when given a batch of
 * String documents with null TextAnalyticsRequestOptions and null language code which will use the default language
 * code, 'en'.
 *
 * {@link TextAnalyticsAsyncClient
 * which TextAnalyticsRequestOptions is null and null language code which will use the default language code, 'en'.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeSentimentStringInputRunner(inputs ->
        StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
            .assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
            .verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and mined options when given a batch of
 * String documents with null TextAnalyticsRequestOptions and given a language code.
 *
 * {@link TextAnalyticsAsyncClient
 * which TextAnalyticsRequestOptions is null and given a language code.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeSentimentLanguageHintRunner((inputs, language) ->
        StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
            .assertNext(response -> validateSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
            .verifyComplete());
}
/**
 * Verify that the collection result includes request statistics but not mined options when given a batch of
 * String documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which to show the request statistics only and verify the analyzed sentiment result.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
        StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
            .assertNext(response -> validateSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
            .verifyComplete());
}
/**
 * Verify that the collection result includes mined options but not request statistics when given a batch of
 * String documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes opinion mining and request statistics.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
        // Runner enables both flags; this test turns statistics back off to isolate opinion mining.
        options.setIncludeStatistics(false);
        StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
            .assertNext(response -> validateSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
            .verifyComplete();
    });
}
/**
 * Verify that the collection result includes mined options and request statistics when given a batch of
 * String documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes opinion mining and request statistics.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
        StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
            .assertNext(response -> validateSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
            .verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and mined options when given a batch of
 * TextDocumentInput documents with null TextAnalyticsRequestOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which TextAnalyticsRequestOptions is null.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentRunner(inputs ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
            .assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete());
}
/**
 * Verify that we can get statistics on the collection result when given a batch of
 * TextDocumentInput documents with TextAnalyticsRequestOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which TextAnalyticsRequestOptions includes request statistics.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
            .assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and mined options when given a batch of
 * TextDocumentInput documents with null AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions is null.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentOpinionMining((inputs, options) ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
            .assertNext(response -> validateSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete());
}
/**
 * Verify that the collection result includes request statistics but not mined options when given a batch of
 * TextDocumentInput documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes request statistics but not opinion mining.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentOpinionMining((inputs, options) ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
            .assertNext(response -> validateSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete());
}
/**
 * Verify that the collection result includes mined options but not request statistics when given a batch of
 * TextDocumentInput documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes opinion mining but not request statistics.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentOpinionMining((inputs, options) -> {
        // Runner enables both flags; turn statistics back off to isolate opinion mining.
        options.setIncludeStatistics(false);
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
            .assertNext(response ->
                validateSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete();
    });
}
/**
 * Verify that the collection result includes mined options and request statistics when given a batch of
 * TextDocumentInput documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes opinion mining and request statistics.
 */
// NOTE(review): the @MethodSource literal and the {@link} tag above are truncated in this extract.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchSentimentOpinionMining((inputs, options) ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
            .assertNext(response -> validateSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
            .verifyComplete());
}
/**
 * Verifies that an InvalidDocumentBatch exception is returned for input documents with too many documents.
 */
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    tooManyDocumentsRunner(inputs ->
        StepVerifier.create(client.analyzeSentimentBatch(inputs, null, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
            }));
}
// Sentence offsets must be 0 when the document begins with an emoji (offset-encoding check).
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emojiRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check as analyzeSentimentEmoji, with a skin-tone-modified emoji prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emojiWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with a multi-codepoint family emoji (ZWJ sequence) prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emojiFamilyRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with a family emoji that also carries skin-tone modifiers.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emojiFamilyWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with NFC-normalized diacritic characters in the prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    diacriticsNfcRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with NFD-decomposed diacritic characters in the prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    diacriticsNfdRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with NFC-normalized Korean text in the prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    koreanNfcRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with NFD-decomposed Korean text in the prefix.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    koreanNfdRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Same offset check with a zalgo-text prefix (heavy combining characters).
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    zalgoTextRunner(document ->
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
                assertEquals(0, sentenceSentiment.getOffset());
            })).verifyComplete(), SENTIMENT_OFFSET_INPUT
    );
}
// Runs the healthcare long-running operation to completion and validates the single-page result.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    healthcareLroRunner((documents, options) -> {
        SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
            syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
        syncPoller.waitForCompletion();
        PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
            = syncPoller.getFinalResult();
        validateHealthcareTaskResult(
            options.isIncludeStatistics(),
            getExpectedHealthcareTaskResultListForSinglePage(),
            healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
    });
}
// Runs the healthcare LRO with 10 documents and validates pagination across the result pages.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    healthcareLroPaginationRunner((documents, options) -> {
        SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
            syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
        syncPoller.waitForCompletion();
        PagedFlux<HealthcareTaskResult> healthcareEntitiesResultCollectionPagedFlux
            = syncPoller.getFinalResult();
        validateHealthcareTaskResult(
            options.isIncludeStatistics(),
            getExpectedHealthcareTaskResultListForMultiplePages(0, 10, 0),
            healthcareEntitiesResultCollectionPagedFlux.toStream().collect(Collectors.toList()));
    }, 10);
}
// An empty document list must fail fast with IllegalArgumentException before starting the LRO.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emptyListRunner((documents, errorMessage) -> {
        StepVerifier.create(client.beginAnalyzeHealthcare(documents, null))
            .expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
                && errorMessage.equals(throwable.getMessage()))
            .verify();
    });
}
// Starts a healthcare LRO for the cancellation scenario.
// NOTE(review): the @MethodSource string literal is truncated in this extract; the body only starts
// the poller and never cancels or asserts — TODO confirm the cancellation steps were not lost in
// extraction (the method name implies a cancel call should follow).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    cancelHealthcareLroRunner((documents, options) -> {
        SyncPoller<TextAnalyticsOperationResult, PagedFlux<HealthcareTaskResult>>
            syncPoller = client.beginAnalyzeHealthcare(documents, options).getSyncPoller();
    });
}
// Runs the batch-actions LRO (entities + PII + key phrases) and validates the combined result list.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchActionsRunner((documents, tasks) -> {
        SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
            client.beginAnalyzeBatchActions(documents, tasks,
                new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
        syncPoller.waitForCompletion();
        PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
        validateAnalyzeBatchActionsResultList(false,
            asList(getExpectedAnalyzeBatchActionsResult(
                IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
                    false, TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
                IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
                    false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
                IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
                    false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
            result.toStream().collect(Collectors.toList()));
    });
}
// Runs the batch-actions LRO with 22 documents and validates pagination (page size 20, 2 remainder).
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    analyzeBatchActionsPaginationRunner((documents, tasks) -> {
        SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>>
            syncPoller = client.beginAnalyzeBatchActions(documents, tasks,
            new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
        syncPoller.waitForCompletion();
        PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
        validateAnalyzeBatchActionsResultList(false,
            getExpectedAnalyzeTaskResultListForMultiplePages(0, 20, 2),
            result.toStream().collect(Collectors.toList()));
    }, 22);
}
// An empty document list must fail fast with IllegalArgumentException before starting batch actions.
// NOTE(review): the @MethodSource string literal is truncated in this extract — TODO confirm full value.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeTasksEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
    emptyListRunner((documents, errorMessage) ->
        StepVerifier.create(client.beginAnalyzeBatchActions(documents,
            new TextAnalyticsActions()
                .setRecognizeEntitiesOptions(new RecognizeEntitiesOptions()), null))
            .expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
                && errorMessage.equals(throwable.getMessage()))
            .verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeBatchActionsPartialCompleted(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion);
analyzeBatchActionsPartialCompletedRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> syncPoller =
client.beginAnalyzeBatchActions(documents, tasks,
new AnalyzeBatchActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller.waitForCompletion();
PagedFlux<AnalyzeBatchActionsResult> result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(Collections.emptyList()),
IterableStream.of(asList(
getExpectedRecognizePiiEntitiesActionResult(true, TIME_NOW, null,
getActionError(INVALID_REQUEST, PII_TASK, "0")),
getExpectedRecognizePiiEntitiesActionResult(
false, TIME_NOW, getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(asList(
getExpectedExtractKeyPhrasesActionResult(
false, TIME_NOW, getExtractKeyPhrasesResultCollection(), null))))),
result.toStream().collect(Collectors.toList()));
}
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
} |
NIT: I wonder if it will be worth creating an actual pair or a small internal struct that will save the action type and the id of it. It will help when reading the code to understand what is in `[0]` and in `[1]` | private String[] parseActionErrorTarget(String targetReference) {
if (CoreUtils.isNullOrEmpty(targetReference)) {
throw logger.logExceptionAsError(new RuntimeException(
"Expected an error with a target field referencing an action but did not get one"));
}
final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
final Matcher matcher = pattern.matcher(targetReference);
String[] taskNameIdPair = new String[2];
while (matcher.find()) {
taskNameIdPair[0] = matcher.group(1);
taskNameIdPair[1] = matcher.group(2);
}
return taskNameIdPair;
} | String[] taskNameIdPair = new String[2]; | private String[] parseActionErrorTarget(String targetReference) {
        // NOTE(review): this is the body of a second, duplicated copy of parseActionErrorTarget;
        // its signature line is garbled in this chunk. A missing/empty target cannot be mapped
        // back to an action, so reject it up front.
        if (CoreUtils.isNullOrEmpty(targetReference)) {
            throw logger.logExceptionAsError(new RuntimeException(
                "Expected an error with a target field referencing an action but did not get one"));
        }
        final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
        final Matcher matcher = pattern.matcher(targetReference);
        // [0] = task collection name, [1] = zero-based task index captured by the regex.
        String[] taskNameIdPair = new String[2];
        while (matcher.find()) {
            taskNameIdPair[0] = matcher.group(1);
            taskNameIdPair[1] = matcher.group(2);
        }
        // If nothing matched, both entries remain null — callers parse [1] as an int, so a
        // non-matching target surfaces as an NPE downstream; confirm inputs always match.
        return taskNameIdPair;
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET = "
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
    /**
     * Creates a client that delegates all "analyze batch actions" calls to the
     * auto-generated service client.
     *
     * @param service the generated Text Analytics service client
     */
    AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
        this.service = service;
    }
    /**
     * Starts a long-running "analyze batch actions" operation whose final result is a
     * {@link PagedFlux} of {@link AnalyzeBatchActionsResult} pages.
     *
     * @param documents the input documents; validated to be non-null/non-empty
     * @param actions the actions (entity, PII, key-phrase) to run over the documents
     * @param options operation options; may be null (defaults applied)
     * @param context pipeline context; may be null (defaults to Context.NONE)
     * @return a poller; activation submits the job, polling tracks status, fetch streams pages
     */
    PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
        Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
        Context context) {
        try {
            inputDocumentsValidation(documents);
            options = getNotNullAnalyzeBatchActionsOptions(options);
            final Context finalContext = getNotNullContext(context);
            // Build the service payload: documents plus the per-action task manifests.
            final AnalyzeBatchInput analyzeBatchInput =
                new AnalyzeBatchInput()
                    .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                    .setTasks(getJobManifestTasks(actions));
            analyzeBatchInput.setDisplayName(actions.getDisplayName());
            final boolean finalIncludeStatistics = options.isIncludeStatistics();
            return new PollerFlux<>(
                DEFAULT_POLL_INTERVAL,
                // Activation: submit the job and capture the operation id from the
                // Operation-Location response header.
                activationOperation(
                    service.analyzeWithResponseAsync(analyzeBatchInput,
                        finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                        .map(analyzeResponse -> {
                            final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
                                new AnalyzeBatchActionsOperationDetail();
                            AnalyzeBatchActionsOperationDetailPropertiesHelper
                                .setOperationId(textAnalyticsOperationResult,
                                    parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                            return textAnalyticsOperationResult;
                        })),
                // Polling: re-query job status until a terminal state is reached.
                pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                    finalIncludeStatistics, null, null, finalContext)),
                // Cancellation is not supported by this operation.
                (activationResponse, pollingContext) ->
                    Mono.error(new RuntimeException("Cancellation is not supported.")),
                // Fetch: expose the finished job's results as a PagedFlux.
                fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
                    operationId, null, null, finalIncludeStatistics, finalContext)))
            );
        } catch (RuntimeException ex) {
            // Surface synchronous validation/build failures through the poller contract.
            return PollerFlux.error(ex);
        }
    }
    /**
     * Variant of {@code beginAnalyzeBatchActions} whose final result is a blocking
     * {@link PagedIterable} instead of a {@link PagedFlux} (used by the sync client).
     *
     * @param documents the input documents; validated to be non-null/non-empty
     * @param actions the actions to run over the documents
     * @param options operation options; may be null (defaults applied)
     * @param context pipeline context; may be null (defaults to Context.NONE)
     * @return a poller whose fetch step wraps the result pages in a PagedIterable
     */
    PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
        beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
            AnalyzeBatchActionsOptions options, Context context) {
        try {
            inputDocumentsValidation(documents);
            options = getNotNullAnalyzeBatchActionsOptions(options);
            final Context finalContext = getNotNullContext(context);
            final AnalyzeBatchInput analyzeBatchInput =
                new AnalyzeBatchInput()
                    .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                    .setTasks(getJobManifestTasks(actions));
            analyzeBatchInput.setDisplayName(actions.getDisplayName());
            final boolean finalIncludeStatistics = options.isIncludeStatistics();
            return new PollerFlux<>(
                DEFAULT_POLL_INTERVAL,
                // Activation: submit the job and record the operation id from Operation-Location.
                activationOperation(
                    service.analyzeWithResponseAsync(analyzeBatchInput,
                        finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                        .map(analyzeResponse -> {
                            final AnalyzeBatchActionsOperationDetail operationDetail =
                                new AnalyzeBatchActionsOperationDetail();
                            AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
                                parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                            return operationDetail;
                        })),
                pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                    finalIncludeStatistics, null, null, finalContext)),
                // Cancellation is not supported by this operation.
                (activationResponse, pollingContext) ->
                    Mono.error(new RuntimeException("Cancellation is not supported.")),
                fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
                    operationId, null, null, finalIncludeStatistics, finalContext))))
            );
        } catch (RuntimeException ex) {
            return PollerFlux.error(ex);
        }
    }
    /**
     * Translates the public {@link TextAnalyticsActions} into the generated-layer
     * {@link JobManifestTasks} wire model. For each action kind: a null options iterable maps
     * to a null task list, and a null element maps to a null task entry.
     *
     * @param actions the user-supplied actions
     * @return the wire-model task manifest
     */
    private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
        return new JobManifestTasks()
            .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
                : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final EntitiesTask entitiesTask = new EntitiesTask();
                        entitiesTask.setParameters(
                            new EntitiesTaskParameters()
                                // Model version defaults to "latest" when unset.
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                        return entitiesTask;
                    }).collect(Collectors.toList()))
            .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
                : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final PiiTask piiTask = new PiiTask();
                        piiTask.setParameters(
                            new PiiTaskParameters()
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion()))
                                // Domain filter is optional; null passes through unchanged.
                                .setDomain(PiiTaskParametersDomain.fromString(
                                    action.getDomainFilter() == null ? null
                                        : action.getDomainFilter().toString())));
                        return piiTask;
                    }).collect(Collectors.toList()))
            .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
                : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
                        keyPhrasesTask.setParameters(
                            new KeyPhrasesTaskParameters()
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                        return keyPhrasesTask;
                    }).collect(Collectors.toList()));
    }
    /**
     * Wraps the job-submission Mono as the poller's activation step, mapping transport
     * errors to HttpResponseException where applicable.
     *
     * @param operationResult the Mono that submits the job and yields the operation detail
     * @return the activation function consumed by {@link PollerFlux}
     */
    private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
        activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
        return pollingContext -> {
            try {
                return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            } catch (RuntimeException ex) {
                // Synchronous failures are funneled into the reactive error channel.
                return monoError(logger, ex);
            }
        };
    }
    /**
     * Builds the poller's status-check step: reads the operation id from the latest poll
     * response, queries job status, and converts it into a {@link PollResponse}.
     *
     * @param pollingFunction service call that fetches {@link AnalyzeJobState} by operation id
     * @return the polling function consumed by {@link PollerFlux}
     */
    private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
        pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
        return pollingContext -> {
            try {
                final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                    pollingContext.getLatestResponse();
                final String operationId = operationResultPollResponse.getValue().getOperationId();
                return pollingFunction.apply(operationId)
                    // Translate the raw job state into poller status + updated operation detail.
                    .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                    .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            } catch (RuntimeException ex) {
                return monoError(logger, ex);
            }
        };
    }
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
    /**
     * Creates a {@link PagedFlux} over a finished operation's results: the first retriever
     * starts from a null continuation token, subsequent pages follow the token chain.
     *
     * @param operationId the completed operation's id
     * @param top optional page size ($top); null lets the service decide
     * @param skip optional offset ($skip); null lets the service decide
     * @param showStats whether to include request statistics in the response
     * @param context pipeline context
     * @return the lazily-fetching paged flux of results
     */
    PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
        boolean showStats, Context context) {
        return new PagedFlux<>(
            () -> getPage(null, operationId, top, skip, showStats, context),
            continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
    }
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
    /**
     * Adapts a raw {@link AnalyzeJobState} response into a single-element paged response,
     * carrying the service's next-link forward as the continuation token.
     *
     * @param response the raw service response
     * @return a paged response wrapping one {@link AnalyzeBatchActionsResult}
     */
    private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
        final AnalyzeJobState analyzeJobState = response.getValue();
        return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
            response.getRequest(),
            response.getStatusCode(),
            response.getHeaders(),
            // Each service page is exposed as one aggregate result element.
            Arrays.asList(toAnalyzeTasks(analyzeJobState)),
            analyzeJobState.getNextLink(),
            null);
    }
    /**
     * Converts a raw {@link AnalyzeJobState} into the public {@link AnalyzeBatchActionsResult}:
     * maps each task kind's results, attaches job-level errors to the action they reference
     * (via the error's target pointer), and copies batch statistics when present.
     *
     * @param analyzeJobState the raw job state from the service
     * @return the public aggregate result
     */
    private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
        TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
        final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
            tasksStateTasks.getEntityRecognitionPiiTasks();
        final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
            tasksStateTasks.getEntityRecognitionTasks();
        final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
            tasksStateTasks.getKeyPhraseExtractionTasks();
        List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
        List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
        List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
        // Map entity-recognition task results, preserving service order (error targets index into it).
        if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
            for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
                final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
                final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
                RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
                    toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                recognizeEntitiesActionResults.add(actionResult);
            }
        }
        // Map PII entity-recognition task results.
        if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
            for (int i = 0; i < piiTasksItems.size(); i++) {
                final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
                final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
                RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
                    toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                recognizePiiEntitiesActionResults.add(actionResult);
            }
        }
        // Map key-phrase extraction task results.
        if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
            for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
                final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
                final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
                ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
                    toExtractKeyPhrasesResultCollection(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                extractKeyPhrasesActionResults.add(actionResult);
            }
        }
        // Attach each job-level error to the action its target pointer references:
        // targetPair[0] = task collection name, targetPair[1] = task index.
        final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
        if (!CoreUtils.isNullOrEmpty(errors)) {
            for (TextAnalyticsError error : errors) {
                final String[] targetPair = parseActionErrorTarget(error.getTarget());
                final String taskName = targetPair[0];
                final Integer taskIndex = Integer.valueOf(targetPair[1]);
                final TextAnalyticsActionResult actionResult;
                if ("entityRecognitionTasks".equals(taskName)) {
                    actionResult = recognizeEntitiesActionResults.get(taskIndex);
                } else if ("entityRecognitionPiiTasks".equals(taskName)) {
                    actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
                } else if ("keyPhraseExtractionTasks".equals(taskName)) {
                    actionResult = extractKeyPhrasesActionResults.get(taskIndex);
                } else {
                    throw logger.logExceptionAsError(new RuntimeException(
                        "Invalid task name in target reference, " + taskName));
                }
                TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
                TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
                    new com.azure.ai.textanalytics.models.TextAnalyticsError(
                        TextAnalyticsErrorCode.fromString(
                            error.getCode() == null ? null : error.getCode().toString()),
                        error.getMessage(), error.getTarget()));
            }
        }
        final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
        // Batch statistics are optional; only mapped when the service returned them.
        final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
        TextDocumentBatchStatistics batchStatistics = null;
        if (requestStatistics != null) {
            batchStatistics = new TextDocumentBatchStatistics(
                requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
                requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
            );
        }
        AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
        AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
            IterableStream.of(recognizeEntitiesActionResults));
        AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
            IterableStream.of(recognizePiiEntitiesActionResults));
        AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
            IterableStream.of(extractKeyPhrasesActionResults));
        return analyzeBatchActionsResult;
    }
    /**
     * Maps a raw job-state response onto the poller: derives the {@link LongRunningOperationStatus}
     * from the service status and copies job metadata (display name, timestamps, action counts)
     * into the operation detail carried by the poll response.
     *
     * @param analyzeJobStateResponse the raw status response
     * @param operationResultPollResponse the previous poll response whose value is updated in place
     * @return the next poll response with the derived status
     */
    private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
        Response<AnalyzeJobState> analyzeJobStateResponse,
        PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
        LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
        if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
            switch (analyzeJobStateResponse.getValue().getStatus()) {
                case NOT_STARTED:
                case RUNNING:
                    status = LongRunningOperationStatus.IN_PROGRESS;
                    break;
                case SUCCEEDED:
                    status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                    break;
                case CANCELLED:
                    status = LongRunningOperationStatus.USER_CANCELLED;
                    break;
                default:
                    // Unknown states (e.g. failed) are passed through as non-complete statuses.
                    status = LongRunningOperationStatus.fromString(
                        analyzeJobStateResponse.getValue().getStatus().toString(), true);
                    break;
            }
        }
        // NOTE(review): getValue() is null-checked above for status only, but dereferenced
        // unconditionally below — a null body would NPE here; confirm the service contract.
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
            analyzeJobStateResponse.getValue().getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
            analyzeJobStateResponse.getValue().getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
            analyzeJobStateResponse.getValue().getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
            analyzeJobStateResponse.getValue().getLastUpdateDateTime());
        // Per-action progress counters exposed on the operation detail.
        final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
            tasksResult.getFailed());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
            tasksResult.getInProgress());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
            operationResultPollResponse.getValue(), tasksResult.getCompleted());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
            tasksResult.getTotal());
        return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
    }
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET =
"
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
    /**
     * Creates a client that delegates all "analyze batch actions" calls to the generated
     * service client.
     *
     * @param service the generated Text Analytics service client
     */
    AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
        this.service = service;
    }
    /**
     * Starts the long-running "analyze batch actions" operation; the final result is a
     * {@link PagedFlux} of {@link AnalyzeBatchActionsResult} pages.
     *
     * @param documents the input documents; validated non-null/non-empty
     * @param actions the actions to run over the documents
     * @param options operation options; null gets defaults
     * @param context pipeline context; null becomes Context.NONE
     * @return a poller: activation submits the job, polling tracks status, fetch streams pages
     */
    PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
        Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
        Context context) {
        try {
            inputDocumentsValidation(documents);
            options = getNotNullAnalyzeBatchActionsOptions(options);
            final Context finalContext = getNotNullContext(context);
            // Wire payload: documents plus the per-action task manifests.
            final AnalyzeBatchInput analyzeBatchInput =
                new AnalyzeBatchInput()
                    .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                    .setTasks(getJobManifestTasks(actions));
            analyzeBatchInput.setDisplayName(actions.getDisplayName());
            final boolean finalIncludeStatistics = options.isIncludeStatistics();
            return new PollerFlux<>(
                DEFAULT_POLL_INTERVAL,
                // Activation: submit the job; operation id comes from Operation-Location.
                activationOperation(
                    service.analyzeWithResponseAsync(analyzeBatchInput,
                        finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                        .map(analyzeResponse -> {
                            final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
                                new AnalyzeBatchActionsOperationDetail();
                            AnalyzeBatchActionsOperationDetailPropertiesHelper
                                .setOperationId(textAnalyticsOperationResult,
                                    parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                            return textAnalyticsOperationResult;
                        })),
                pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                    finalIncludeStatistics, null, null, finalContext)),
                // Cancellation is not supported by this operation.
                (activationResponse, pollingContext) ->
                    Mono.error(new RuntimeException("Cancellation is not supported.")),
                fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
                    operationId, null, null, finalIncludeStatistics, finalContext)))
            );
        } catch (RuntimeException ex) {
            // Synchronous validation/build failures surface through the poller contract.
            return PollerFlux.error(ex);
        }
    }
    /**
     * Variant whose final result is a blocking {@link PagedIterable} (used by the sync client).
     *
     * @param documents the input documents; validated non-null/non-empty
     * @param actions the actions to run over the documents
     * @param options operation options; null gets defaults
     * @param context pipeline context; null becomes Context.NONE
     * @return a poller whose fetch step wraps result pages in a PagedIterable
     */
    PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
        beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
            AnalyzeBatchActionsOptions options, Context context) {
        try {
            inputDocumentsValidation(documents);
            options = getNotNullAnalyzeBatchActionsOptions(options);
            final Context finalContext = getNotNullContext(context);
            final AnalyzeBatchInput analyzeBatchInput =
                new AnalyzeBatchInput()
                    .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                    .setTasks(getJobManifestTasks(actions));
            analyzeBatchInput.setDisplayName(actions.getDisplayName());
            final boolean finalIncludeStatistics = options.isIncludeStatistics();
            return new PollerFlux<>(
                DEFAULT_POLL_INTERVAL,
                // Activation: submit the job; operation id parsed from Operation-Location.
                activationOperation(
                    service.analyzeWithResponseAsync(analyzeBatchInput,
                        finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                        .map(analyzeResponse -> {
                            final AnalyzeBatchActionsOperationDetail operationDetail =
                                new AnalyzeBatchActionsOperationDetail();
                            AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
                                parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                            return operationDetail;
                        })),
                pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                    finalIncludeStatistics, null, null, finalContext)),
                // Cancellation is not supported by this operation.
                (activationResponse, pollingContext) ->
                    Mono.error(new RuntimeException("Cancellation is not supported.")),
                fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
                    operationId, null, null, finalIncludeStatistics, finalContext))))
            );
        } catch (RuntimeException ex) {
            return PollerFlux.error(ex);
        }
    }
    /**
     * Translates {@link TextAnalyticsActions} into the generated-layer {@link JobManifestTasks}.
     * A null options iterable maps to a null task list; a null element maps to a null entry.
     *
     * @param actions the user-supplied actions
     * @return the wire-model task manifest
     */
    private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
        return new JobManifestTasks()
            .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
                : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final EntitiesTask entitiesTask = new EntitiesTask();
                        entitiesTask.setParameters(
                            new EntitiesTaskParameters()
                                // Model version defaults to "latest" when unset.
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                        return entitiesTask;
                    }).collect(Collectors.toList()))
            .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
                : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final PiiTask piiTask = new PiiTask();
                        piiTask.setParameters(
                            new PiiTaskParameters()
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion()))
                                // Domain filter is optional; null passes through unchanged.
                                .setDomain(PiiTaskParametersDomain.fromString(
                                    action.getDomainFilter() == null ? null
                                        : action.getDomainFilter().toString())));
                        return piiTask;
                    }).collect(Collectors.toList()))
            .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
                : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
                    action -> {
                        if (action == null) {
                            return null;
                        }
                        final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
                        keyPhrasesTask.setParameters(
                            new KeyPhrasesTaskParameters()
                                .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                        return keyPhrasesTask;
                    }).collect(Collectors.toList()));
    }
    /**
     * Wraps the job-submission Mono as the poller's activation step, mapping transport
     * errors to HttpResponseException where applicable.
     *
     * @param operationResult the Mono that submits the job and yields the operation detail
     * @return the activation function consumed by {@link PollerFlux}
     */
    private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
        activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
        return pollingContext -> {
            try {
                return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            } catch (RuntimeException ex) {
                // Synchronous failures are funneled into the reactive error channel.
                return monoError(logger, ex);
            }
        };
    }
    /**
     * Builds the poller's status-check step: reads the operation id from the latest poll
     * response, queries job status, and converts it into a {@link PollResponse}.
     *
     * @param pollingFunction service call fetching {@link AnalyzeJobState} by operation id
     * @return the polling function consumed by {@link PollerFlux}
     */
    private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
        pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
        return pollingContext -> {
            try {
                final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                    pollingContext.getLatestResponse();
                final String operationId = operationResultPollResponse.getValue().getOperationId();
                return pollingFunction.apply(operationId)
                    // Translate raw job state into poller status + updated operation detail.
                    .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                    .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
            } catch (RuntimeException ex) {
                return monoError(logger, ex);
            }
        };
    }
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
    /**
     * Creates a {@link PagedFlux} over a finished operation's results: first page starts from
     * a null continuation token; later pages follow the token chain.
     *
     * @param operationId the completed operation's id
     * @param top optional page size ($top); null lets the service decide
     * @param skip optional offset ($skip); null lets the service decide
     * @param showStats whether to include request statistics
     * @param context pipeline context
     * @return the lazily-fetching paged flux of results
     */
    PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
        boolean showStats, Context context) {
        return new PagedFlux<>(
            () -> getPage(null, operationId, top, skip, showStats, context),
            continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
    }
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
    /**
     * Adapts a raw {@link AnalyzeJobState} response into a single-element paged response,
     * carrying the service's next-link forward as the continuation token.
     *
     * @param response the raw service response
     * @return a paged response wrapping one {@link AnalyzeBatchActionsResult}
     */
    private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
        final AnalyzeJobState analyzeJobState = response.getValue();
        return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
            response.getRequest(),
            response.getStatusCode(),
            response.getHeaders(),
            // Each service page is exposed as one aggregate result element.
            Arrays.asList(toAnalyzeTasks(analyzeJobState)),
            analyzeJobState.getNextLink(),
            null);
    }
    /**
     * Converts a raw {@link AnalyzeJobState} into the public {@link AnalyzeBatchActionsResult}:
     * maps each task kind's results, attaches job-level errors to the action their target
     * pointer references, and copies batch statistics when present.
     *
     * @param analyzeJobState the raw job state from the service
     * @return the public aggregate result
     */
    private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
        TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
        final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
            tasksStateTasks.getEntityRecognitionPiiTasks();
        final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
            tasksStateTasks.getEntityRecognitionTasks();
        final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
            tasksStateTasks.getKeyPhraseExtractionTasks();
        List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
        List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
        List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
        // Map entity-recognition results in service order (error targets index into this list).
        if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
            for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
                final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
                final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
                RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
                    toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                recognizeEntitiesActionResults.add(actionResult);
            }
        }
        // Map PII entity-recognition results.
        if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
            for (int i = 0; i < piiTasksItems.size(); i++) {
                final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
                final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
                RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
                    toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                recognizePiiEntitiesActionResults.add(actionResult);
            }
        }
        // Map key-phrase extraction results.
        if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
            for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
                final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
                final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
                ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
                    toExtractKeyPhrasesResultCollection(taskItem.getResults()));
                TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                    taskItem.getLastUpdateDateTime());
                extractKeyPhrasesActionResults.add(actionResult);
            }
        }
        // Attach each job-level error to the action its target references:
        // targetPair[0] = task collection name, targetPair[1] = task index.
        final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
        if (!CoreUtils.isNullOrEmpty(errors)) {
            for (TextAnalyticsError error : errors) {
                final String[] targetPair = parseActionErrorTarget(error.getTarget());
                final String taskName = targetPair[0];
                final Integer taskIndex = Integer.valueOf(targetPair[1]);
                final TextAnalyticsActionResult actionResult;
                if ("entityRecognitionTasks".equals(taskName)) {
                    actionResult = recognizeEntitiesActionResults.get(taskIndex);
                } else if ("entityRecognitionPiiTasks".equals(taskName)) {
                    actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
                } else if ("keyPhraseExtractionTasks".equals(taskName)) {
                    actionResult = extractKeyPhrasesActionResults.get(taskIndex);
                } else {
                    throw logger.logExceptionAsError(new RuntimeException(
                        "Invalid task name in target reference, " + taskName));
                }
                TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
                TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
                    new com.azure.ai.textanalytics.models.TextAnalyticsError(
                        TextAnalyticsErrorCode.fromString(
                            error.getCode() == null ? null : error.getCode().toString()),
                        // NOTE(review): the target is deliberately not propagated into the
                        // exposed error here (the earlier copy passes error.getTarget()) —
                        // confirm which behavior is intended.
                        error.getMessage(), null));
            }
        }
        final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
        // Batch statistics are optional; only mapped when the service returned them.
        final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
        TextDocumentBatchStatistics batchStatistics = null;
        if (requestStatistics != null) {
            batchStatistics = new TextDocumentBatchStatistics(
                requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
                requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
            );
        }
        AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
        AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
            IterableStream.of(recognizeEntitiesActionResults));
        AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
            IterableStream.of(recognizePiiEntitiesActionResults));
        AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
            IterableStream.of(extractKeyPhrasesActionResults));
        return analyzeBatchActionsResult;
    }
/**
 * Maps a service {@code AnalyzeJobState} polling response onto the SDK long-running-operation
 * poll response: derives the LRO status from the job status and copies job metadata plus
 * per-action task counts onto the operation detail held by the previous poll response.
 *
 * @param analyzeJobStateResponse Raw service response for the analyze job.
 * @param operationResultPollResponse The previous poll response whose operation detail is updated in place.
 * @return A {@link Mono} emitting the new poll response carrying the mapped status and the updated detail.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    // Default to completed; only overridden when the service reported an explicit status.
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
        switch (analyzeJobStateResponse.getValue().getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unrecognized service status: surface it verbatim as a non-complete LRO status.
                status = LongRunningOperationStatus.fromString(
                    analyzeJobStateResponse.getValue().getStatus().toString(), true);
                break;
        }
    }
    // NOTE(review): getValue() is null-checked above but dereferenced unconditionally from here on;
    // a null body would throw NullPointerException. Confirm the service always returns a body here.
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getDisplayName());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getCreatedDateTime());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getExpirationDateTime());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getLastUpdateDateTime());
    // Copy the per-action progress counters (failed / in-progress / succeeded / total).
    final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
        tasksResult.getFailed());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
        tasksResult.getInProgress());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
        operationResultPollResponse.getValue(), tasksResult.getCompleted());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
        tasksResult.getTotal());
    return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
/**
 * Returns the caller-supplied context, substituting {@link Context#NONE} for {@code null}.
 *
 * @param context Context supplied by the caller; may be {@code null}.
 * @return A non-null {@link Context}.
 */
private Context getNotNullContext(Context context) {
    if (context == null) {
        return Context.NONE;
    }
    return context;
}
/**
 * Returns the caller-supplied options, substituting a default-constructed
 * {@link AnalyzeBatchActionsOptions} for {@code null}.
 *
 * @param options Options supplied by the caller; may be {@code null}.
 * @return A non-null {@link AnalyzeBatchActionsOptions}.
 */
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options == null) {
        return new AnalyzeBatchActionsOptions();
    }
    return options;
}
/**
 * Returns the caller-supplied model version, substituting {@code "latest"} for {@code null}.
 *
 * @param modelVersion Model version supplied by the caller; may be {@code null}.
 * @return A non-null model version string.
 */
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion == null) {
        return "latest";
    }
    return modelVersion;
}
} |
I think this is fine. It'll fall through to the default case below, since both of those body types are unsupported at the moment. | public BinaryData getBodyAsBinaryData() {
return body;
} | return body; | public BinaryData getBodyAsBinaryData() {
return BinaryData.fromBytes(annotatedMessage.getBody().getFirstData());
} | class EventData {
private static final int MAX_MESSAGE_ID_LENGTH = 128;
private static final int MAX_PARTITION_KEY_LENGTH = 128;
private static final int MAX_SESSION_ID_LENGTH = 128;
private final BinaryData body;
private final AmqpAnnotatedMessage amqpAnnotatedMessage;
private final ClientLogger logger = new ClientLogger(EventData.class);
private Context context;
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(byte[] body) {
this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null.")));
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
/**
* Creates an event by encoding the {@code body} using UTF-8 charset.
*
* @param body The string that will be UTF-8 encoded to create an event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(String body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
}
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
public EventData(BinaryData body) {
this(body, Context.NONE);
}
/**
* Creates an event with the given {@code body}, system properties and context.
*
* @param body The data to set for this event.
* @param context A specified key-value pair of type {@link Context}.
* @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}.
*/
EventData(BinaryData body, Context context) {
this.body = Objects.requireNonNull(body, "'body' cannot be null.");
this.context = Objects.requireNonNull(context, "'context' cannot be null.");
this.amqpAnnotatedMessage = new AmqpAnnotatedMessage(AmqpMessageBody.fromData(body.toBytes()));
}
/**
* Gets the set of free-form event properties which may be used for passing metadata associated with the event with
* the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
* serialization hints for the {@link
*
* <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
* <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
*
* {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
*
* @return Application properties associated with this {@link EventData}.
*/
public Map<String, Object> getProperties() {
return amqpAnnotatedMessage.getApplicationProperties();
}
/**
* Gets the actual payload/data wrapped by EventData.
*
* <p>
* If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
* {@link
* wish to deserialize the binary data.
* </p>
*
* @return A byte array representing the data.
*/
/**
 * Gets the actual payload/data wrapped by EventData.
 *
 * <p>
 * If the means for deserializing the raw data is not apparent to consumers, a common technique is to attach
 * serialization hints via the application properties so consumers know how to deserialize the binary data.
 * </p>
 *
 * @return A byte array representing the data.
 * @throws UnsupportedOperationException if the underlying AMQP body is a SEQUENCE or VALUE body.
 * @throws IllegalArgumentException if the AMQP body type is not recognized.
 */
public byte[] getBody() {
    final AmqpMessageBodyType type = amqpAnnotatedMessage.getBody().getBodyType();
    if (type == AmqpMessageBodyType.DATA) {
        return amqpAnnotatedMessage.getBody().getFirstData();
    }
    if (type == AmqpMessageBodyType.SEQUENCE || type == AmqpMessageBodyType.VALUE) {
        throw logger.logExceptionAsError(new UnsupportedOperationException("Not supported AmqpBodyType: "
            + type.toString()));
    }
    throw logger.logExceptionAsError(new IllegalArgumentException("Unknown AmqpBodyType: "
        + type.toString()));
}
/**
* Returns event data as UTF-8 decoded string.
*
* @return UTF-8 decoded string representation of the event data.
*/
public String getBodyAsString() {
return new String(body.toBytes(), UTF_8);
}
/**
* Returns the {@link BinaryData} payload associated with this event.
*
* @return the {@link BinaryData} payload associated with this event.
*/
/**
* Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
* on a <b>received</b> {@link EventData}.
*
* @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
/**
 * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
 * on a <b>received</b> {@link EventData}.
 *
 * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
 * was not received from Event Hubs service.
 */
public Long getOffset() {
    final Object value = amqpAnnotatedMessage.getMessageAnnotations().get(OFFSET_ANNOTATION_NAME.getValue());
    if (value == null) {
        return null;
    }
    return (Long) value;
}
/**
* Sets the offset of the event when it was received from the associated Event Hub partition.
*
* @param offset Offset value of this message
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setOffset(Long offset) {
amqpAnnotatedMessage.getMessageAnnotations().put(OFFSET_ANNOTATION_NAME.getValue(), offset);
return this;
}
/**
* Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
* used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
* Hubs service or there was no partition key set when the event was sent to the Event Hub.
*/
public String getPartitionKey() {
return (String) amqpAnnotatedMessage.getMessageAnnotations().get(PARTITION_KEY_ANNOTATION_NAME.getValue());
}
/**
* Sets the instant, in UTC, of when the event was enqueued in the Event Hub partition.
*
* @param enqueuedTime Enqueued time of this message
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setEnqueuedTime(Instant enqueuedTime) {
amqpAnnotatedMessage.getMessageAnnotations().put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), enqueuedTime);
return this;
}
/**
* Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
* <b>received</b> {@link EventData}.
*
* @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
/**
 * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
 * <b>received</b> {@link EventData}.
 *
 * <p>The annotation may hold either a {@link Date} (as delivered by the broker) or an {@link Instant}
 * (as stored by {@link #setEnqueuedTime(Instant)} under the same annotation key). The previous
 * unconditional {@code (Date)} cast threw {@link ClassCastException} when reading back a locally-set
 * value; both representations are handled now.</p>
 *
 * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
 * was not received from Event Hubs service.
 */
public Instant getEnqueuedTime() {
    final Object value = amqpAnnotatedMessage.getMessageAnnotations().get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
    if (value == null) {
        return null;
    }
    if (value instanceof Instant) {
        return (Instant) value;
    }
    return ((Date) value).toInstant();
}
/**
* Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
* is unique for every message received in the Event Hub partition. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
* Hubs service.
*/
public Long getSequenceNumber() {
Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
return value != null
? (Long) value
: null;
}
/**
* Sets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition.
*
* @param sequenceNumber Sequence number of this message
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setSequenceNumber(Long sequenceNumber) {
amqpAnnotatedMessage.getMessageAnnotations().put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), sequenceNumber);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EventData eventData = (EventData) o;
return Arrays.equals(body.toBytes(), eventData.body.toBytes());
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return Arrays.hashCode(body.toBytes());
}
/**
* A specified key-value pair of type {@link Context} to set additional information on the event.
*
* @return the {@link Context} object set on the event
*/
Context getContext() {
return context;
}
/**
* Adds a new key value pair to the existing context on Event Data.
*
* @param key The key for this context object
* @param value The value for this context object.
* @throws NullPointerException if {@code key} or {@code value} is null.
* @return The updated {@link EventData}.
*/
public EventData addContext(String key, Object value) {
Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
this.context = context.addData(key, value);
return this;
}
/**
* Gets the content type of the message.
*
* <p>
* Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5,
* for example "application/json".
* </p>
* @return The content type of the {@link EventData}.
*/
public String getContentType() {
return amqpAnnotatedMessage.getProperties().getContentType();
}
/**
* Sets the content type of the {@link EventData}.
*
* <p>
* Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5,
* for example "application/json".
* </p>
*
* @param contentType RFC2045 Content-Type descriptor of the message.
*
* @return The updated {@link EventData}.
*/
public EventData setContentType(String contentType) {
amqpAnnotatedMessage.getProperties().setContentType(contentType);
return this;
}
/**
* Gets a correlation identifier.
* <p>
* Allows an application to specify a context for the message for the purposes of correlation, for example
* reflecting the MessageId of a message that is being replied to.
* </p>
*
* @return The correlation id of this message.
*/
/**
 * Gets a correlation identifier.
 * <p>
 * Allows an application to specify a context for the message for the purposes of correlation, for example
 * reflecting the MessageId of a message that is being replied to.
 * </p>
 *
 * @return The correlation id of this message, or {@code null} if none is set.
 */
public String getCorrelationId() {
    final AmqpMessageId amqpCorrelationId = amqpAnnotatedMessage.getProperties().getCorrelationId();
    return amqpCorrelationId == null ? null : amqpCorrelationId.toString();
}
/**
* Sets a correlation identifier.
*
* @param correlationId correlation id of this message
*
* @return The updated {@link EventData}.
* @see
*/
/**
 * Sets a correlation identifier. Passing {@code null} clears any previously-set correlation id.
 *
 * @param correlationId correlation id of this message
 *
 * @return The updated {@link EventData}.
 */
public EventData setCorrelationId(String correlationId) {
    final AmqpMessageId id = correlationId == null ? null : new AmqpMessageId(correlationId);
    amqpAnnotatedMessage.getProperties().setCorrelationId(id);
    return this;
}
/**
* Gets the message id.
*
* <p>
* The message identifier is an application-defined value that uniquely identifies the message and its payload. The
* identifier is a free-form string and can reflect a GUID or an identifier derived from the application context.
* </p>
*
* @return Id of the {@link EventData}.
*/
public byte[] getUserId() {
return amqpAnnotatedMessage.getProperties().getUserId();
}
/**
* Sets the message id.
*
* @param userId The message id to be set.
*
* @return The updated {@link EventData}.
* @throws IllegalArgumentException if {@code messageId} is too long.
*/
public EventData setUserId(byte[] userId) {
amqpAnnotatedMessage.getProperties().setUserId(userId);
return this;
}
/**
* Gets the message id.
*
* <p>
* The message identifier is an application-defined value that uniquely identifies the message and its payload. The
* identifier is a free-form string and can reflect a GUID or an identifier derived from the application context.
* </p>
*
* @return Id of the {@link EventData}.
*/
public String getMessageId() {
String messageId = null;
AmqpMessageId amqpMessageId = amqpAnnotatedMessage.getProperties().getMessageId();
if (amqpMessageId != null) {
messageId = amqpMessageId.toString();
}
return messageId;
}
/**
* Sets the message id.
*
* @param messageId The message id to be set.
*
* @return The updated {@link EventData}.
* @throws IllegalArgumentException if {@code messageId} is too long.
*/
public EventData setMessageId(String messageId) {
checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH);
AmqpMessageId id = null;
if (messageId != null) {
id = new AmqpMessageId(messageId);
}
amqpAnnotatedMessage.getProperties().setMessageId(id);
return this;
}
/**
* Gets the subject for the message.
*
* <p>
* This property enables the application to indicate the purpose of the message to the receiver in a standardized
* fashion, similar to an email subject line. The mapped AMQP property is "subject".
* </p>
*
* @return The subject for the message.
*/
public String getSubject() {
return amqpAnnotatedMessage.getProperties().getSubject();
}
/**
* Sets the subject for the message.
*
* @param subject The application specific subject.
*
* @return The updated {@link EventData} object.
*/
public EventData setSubject(String subject) {
amqpAnnotatedMessage.getProperties().setSubject(subject);
return this;
}
/**
* Gets the "to" address.
*
* <p>
* This property is reserved for future use in routing scenarios and presently ignored by the broker itself.
* Applications can use this value in rule-driven
* auto-forward scenarios to indicate the intended logical destination of the message.
* </p>
*
* @return "To" property value of this message
*/
public String getTo() {
String to = null;
AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getTo();
if (amqpAddress != null) {
to = amqpAddress.toString();
}
return to;
}
/**
* Sets the "to" address.
*
* <p>
* This property is reserved for future use in routing scenarios and presently ignored by the broker itself.
* Applications can use this value in rule-driven
* auto-forward chaining scenarios to indicate the intended logical destination of the message.
* </p>
*
* @param to To property value of this message.
*
* @return The updated {@link EventData}.
*/
public EventData setTo(String to) {
AmqpAddress toAddress = null;
if (to != null) {
toAddress = new AmqpAddress(to);
}
amqpAnnotatedMessage.getProperties().setTo(toAddress);
return this;
}
/**
* Gets the address of an entity to send replies to.
* <p>
* This optional and application-defined value is a standard way to express a reply path to the receiver of the
* message. When a sender expects a reply, it sets the value to the absolute or relative path of the queue or topic
* it expects the reply to be sent to.
*
* @return ReplyTo property value of this message
*/
public String getReplyTo() {
String replyTo = null;
AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getReplyTo();
if (amqpAddress != null) {
replyTo = amqpAddress.toString();
}
return replyTo;
}
/**
* Sets the address of an entity to send replies to.
*
* @param replyTo ReplyTo property value of this message
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setReplyTo(String replyTo) {
AmqpAddress replyToAddress = null;
if (replyTo != null) {
replyToAddress = new AmqpAddress(replyTo);
}
amqpAnnotatedMessage.getProperties().setReplyTo(replyToAddress);
return this;
}
/**
* Gets the duration before this message expires.
* <p>
* This value is the relative duration after which the message expires, starting from the instant the message has
* been accepted and stored by the broker, as captured in {@link
* explicitly, the assumed value is the DefaultTimeToLive set for the respective queue or topic. A message-level
* TimeToLive value cannot be longer than the entity's DefaultTimeToLive setting and it is silently adjusted if it
* does.
*
* @return Time to live duration of this message
*/
public Duration getTimeToLive() {
return amqpAnnotatedMessage.getHeader().getTimeToLive();
}
/**
* Sets the duration of time before this message expires.
*
* @param timeToLive Time to Live duration of this message
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setTimeToLive(Duration timeToLive) {
amqpAnnotatedMessage.getHeader().setTimeToLive(timeToLive);
return this;
}
/**
* Gets the session identifier for a session-aware entity.
*
* <p>
* For session-aware entities, this application-defined value specifies the session affiliation of the message.
* Messages with the same session identifier are subject to summary locking and enable exact in-order processing and
* demultiplexing. For session-unaware entities, this value is ignored.
* </p>
*
* @return The session id of the {@link EventData}.
* @see <a href="https:
*/
public String getSessionId() {
return amqpAnnotatedMessage.getProperties().getGroupId();
}
/**
* Sets the session identifier for a session-aware entity.
*
* @param sessionId The session identifier to be set.
*
* @return The updated {@link EventData}.
* @throws IllegalArgumentException if {@code sessionId} is too long or if the {@code sessionId} does not match
* the {@code partitionKey}.
*/
public EventData setSessionId(String sessionId) {
checkIdLength("sessionId", sessionId, MAX_SESSION_ID_LENGTH);
checkSessionId(sessionId);
amqpAnnotatedMessage.getProperties().setGroupId(sessionId);
return this;
}
/**
* Gets the scheduled enqueue time of this message.
* <p>
* This value is used for delayed message availability. The message is safely added to the queue, but is not
* considered active and therefore not retrievable until the scheduled enqueue time. Mind that the message may not
* be activated (enqueued) at the exact given datetime; the actual activation time depends on the queue's workload
* and its state.
* </p>
*
* @return the datetime at which the message will be enqueued in Azure Service Bus
*/
public OffsetDateTime getScheduledEnqueueTime() {
Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue());
return value != null
? ((OffsetDateTime) value).toInstant().atOffset(ZoneOffset.UTC)
: null;
}
/**
* Sets the scheduled enqueue time of this message. A {@code null} will not be set. If this value needs to be unset
* it could be done by value removing from {@link AmqpAnnotatedMessage
* AmqpMessageConstant
*
* @param scheduledEnqueueTime the datetime at which this message should be enqueued in Azure Service Bus.
*
* @return The updated {@link EventData}.
* @see
*/
public EventData setScheduledEnqueueTime(OffsetDateTime scheduledEnqueueTime) {
if (scheduledEnqueueTime != null) {
amqpAnnotatedMessage.getMessageAnnotations().put(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue(),
scheduledEnqueueTime);
}
return this;
}
/**
* Sets a partition key for sending a message to a partitioned entity
*
* @param partitionKey The partition key of this message.
*
* @return The updated {@link EventData}.
* @throws IllegalArgumentException if {@code partitionKey} is too long or if the {@code partitionKey} does not
* match the {@code sessionId}.
* @see
*/
public EventData setPartitionKey(String partitionKey) {
checkIdLength("partitionKey", partitionKey, MAX_PARTITION_KEY_LENGTH);
checkPartitionKey(partitionKey);
amqpAnnotatedMessage.getMessageAnnotations().put(PARTITION_KEY_ANNOTATION_NAME.getValue(), partitionKey);
return this;
}
/**
* Gets or sets a session identifier augmenting the {@link
* <p>
* This value augments the {@link
* be set for the reply when sent to the reply entity.
*
* @return The {@code getReplyToGroupId} property value of this message.
*/
public String getReplyToSessionId() {
return amqpAnnotatedMessage.getProperties().getReplyToGroupId();
}
/**
* Gets or sets a session identifier augmenting the {@link
*
* @param replyToSessionId The ReplyToGroupId property value of this message.
*
* @return The updated {@link EventData}.
*/
public EventData setReplyToSessionId(String replyToSessionId) {
amqpAnnotatedMessage.getProperties().setReplyToGroupId(replyToSessionId);
return this;
}
/**
* Gets the {@link AmqpAnnotatedMessage}.
*
* @return The raw AMQP message.
*/
public AmqpAnnotatedMessage getRawAmqpMessage() {
return amqpAnnotatedMessage;
}
/**
* Validates that the user can't set the partitionKey to a different value than the session ID. (this will
* eventually migrate to a service-side check)
*/
/**
 * Validates that the proposed session id does not conflict with an already-set partition key.
 * (This will eventually migrate to a service-side check.)
 *
 * @param proposedSessionId Candidate session id; {@code null} is always accepted.
 * @throws IllegalArgumentException if a partition key is set and differs from {@code proposedSessionId}.
 */
private void checkSessionId(String proposedSessionId) {
    if (proposedSessionId == null) {
        return;
    }
    final String partitionKey = this.getPartitionKey();
    if (partitionKey != null && !partitionKey.equals(proposedSessionId)) {
        final String message = String.format(
            "sessionId:%s cannot be set to a different value than partitionKey:%s.",
            proposedSessionId,
            partitionKey);
        throw logger.logExceptionAsError(new IllegalArgumentException(message));
    }
}
/**
* Checks the length of ID fields.
*
* Some fields within the message will cause a failure in the service without enough context information.
*/
/**
 * Checks the length of ID fields.
 *
 * Some fields within the message will cause a failure in the service without enough context information.
 *
 * @param fieldName Name of the field, used in the error message.
 * @param value Candidate value; {@code null} is always accepted.
 * @param maxLength Maximum permitted length, inclusive.
 * @throws IllegalArgumentException if {@code value} exceeds {@code maxLength} characters.
 */
private void checkIdLength(String fieldName, String value, int maxLength) {
    if (value == null || value.length() <= maxLength) {
        return;
    }
    throw logger.logExceptionAsError(new IllegalArgumentException(
        String.format("%s cannot be longer than %d characters.", fieldName, maxLength)));
}
/**
* Validates that the user can't set the partitionKey to a different value than the session ID. (this will
* eventually migrate to a service-side check)
*/
/**
 * Validates that the proposed partition key does not conflict with an already-set session id.
 * (This will eventually migrate to a service-side check.)
 *
 * @param proposedPartitionKey Candidate partition key; {@code null} is always accepted.
 * @throws IllegalArgumentException if a session id is set and differs from {@code proposedPartitionKey}.
 */
private void checkPartitionKey(String proposedPartitionKey) {
    if (proposedPartitionKey == null) {
        return;
    }
    final String sessionId = this.getSessionId();
    if (sessionId != null && !sessionId.equals(proposedPartitionKey)) {
        final String message = String.format(
            "partitionKey:%s cannot be set to a different value than sessionId:%s.",
            proposedPartitionKey,
            sessionId);
        throw logger.logExceptionAsError(new IllegalArgumentException(message));
    }
}
} | class EventData {
/*
* These are properties owned by the service and set when a message is received.
*/
static final Set<String> RESERVED_SYSTEM_PROPERTIES;
private final Map<String, Object> properties;
private final SystemProperties systemProperties;
private final AmqpAnnotatedMessage annotatedMessage;
private Context context;
static {
    // Annotation keys owned by the Event Hubs service: these are populated on received events
    // and must not be supplied as user application properties.
    final Set<String> properties = new HashSet<>();
    properties.add(OFFSET_ANNOTATION_NAME.getValue());
    properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue());
    properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
    properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
    properties.add(PUBLISHER_ANNOTATION_NAME.getValue());
    // Published as an unmodifiable view so the reserved set can never be mutated.
    RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties);
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
*
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(byte[] body) {
this.context = Context.NONE;
final AmqpMessageBody messageBody = AmqpMessageBody.fromData(
Objects.requireNonNull(body, "'body' cannot be null."));
this.annotatedMessage = new AmqpAnnotatedMessage(messageBody);
this.properties = annotatedMessage.getApplicationProperties();
this.systemProperties = new SystemProperties();
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
*
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
/**
* Creates an event by encoding the {@code body} using UTF-8 charset.
*
* @param body The string that will be UTF-8 encoded to create an event.
*
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(String body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
}
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
public EventData(BinaryData body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").toBytes());
}
/**
* Creates an event with the given {@code body}, system properties and context. Used in the case where a message
* is received from the service.
*
* @param context A specified key-value pair of type {@link Context}.
* @param amqpAnnotatedMessage Backing annotated message.
*
* @throws NullPointerException if {@code amqpAnnotatedMessage} or {@code context} is {@code null}.
* @throws IllegalArgumentException if {@code amqpAnnotatedMessage}'s body type is unknown.
*/
EventData(AmqpAnnotatedMessage amqpAnnotatedMessage, SystemProperties systemProperties, Context context) {
    this.context = Objects.requireNonNull(context, "'context' cannot be null.");
    // Received events expose their application properties as a read-only view.
    // NOTE(review): amqpAnnotatedMessage is dereferenced here before its requireNonNull check on the
    // next statement, so a null argument surfaces as a bare NPE without the descriptive message.
    this.properties = Collections.unmodifiableMap(amqpAnnotatedMessage.getApplicationProperties());
    this.annotatedMessage = Objects.requireNonNull(amqpAnnotatedMessage,
        "'amqpAnnotatedMessage' cannot be null.");
    this.systemProperties = systemProperties;
    // Only DATA bodies are fully supported. SEQUENCE/VALUE bodies are accepted with a warning, but
    // reading their contents later (getBody()/getBodyAsString()) may throw.
    switch (annotatedMessage.getBody().getBodyType()) {
        case DATA:
            break;
        case SEQUENCE:
        case VALUE:
            // NOTE(review): a throwaway ClientLogger is allocated per call here and below; a
            // static logger field would avoid that (out of scope for this constructor).
            new ClientLogger(EventData.class).warning("Message body type '{}' is not supported in EH. "
                + " Getting contents of body may throw.", annotatedMessage.getBody().getBodyType());
            break;
        default:
            throw new ClientLogger(EventData.class).logExceptionAsError(new IllegalArgumentException(
                "Body type not valid " + annotatedMessage.getBody().getBodyType()));
    }
}
/**
* Gets the set of free-form event properties which may be used for passing metadata associated with the event with
* the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
* serialization hints for the {@link
*
* <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
* <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
*
* {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
*
* @return Application properties associated with this {@link EventData}. For received {@link EventData}, the map is
* a read-only view.
*/
public Map<String, Object> getProperties() {
return properties;
}
/**
* Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are
* only present on a <b>received</b> {@link EventData}.
*
* @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. {@code
* null} if the {@link EventData} is not received from the Event Hubs service.
*/
public Map<String, Object> getSystemProperties() {
return systemProperties;
}
/**
* Gets the actual payload/data wrapped by EventData.
*
* <p>
* If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
* {@link
* wish to deserialize the binary data.
* </p>
*
* @return A byte array representing the data.
*/
public byte[] getBody() {
return annotatedMessage.getBody().getFirstData();
}
/**
* Returns event data as UTF-8 decoded string.
*
* @return UTF-8 decoded string representation of the event data.
*/
/**
 * Returns event data as UTF-8 decoded string.
 *
 * @return UTF-8 decoded string representation of the event data.
 */
public String getBodyAsString() {
    final byte[] payload = annotatedMessage.getBody().getFirstData();
    return new String(payload, UTF_8);
}
/**
* Returns the {@link BinaryData} payload associated with this event.
*
* @return the {@link BinaryData} payload associated with this event.
*/
/**
* Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
* on a <b>received</b> {@link EventData}.
*
* @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Long getOffset() {
return systemProperties.getOffset();
}
/**
* Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
* used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
* Hubs service or there was no partition key set when the event was sent to the Event Hub.
*/
public String getPartitionKey() {
return systemProperties.getPartitionKey();
}
/**
* Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
* <b>received</b> {@link EventData}.
*
* @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Instant getEnqueuedTime() {
return systemProperties.getEnqueuedTime();
}
/**
* Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
* is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> {@link
* EventData}.
*
* @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
* Hubs service.
*/
public Long getSequenceNumber() {
return systemProperties.getSequenceNumber();
}
/**
* Gets the underlying AMQP message.
*
* @return The underlying AMQP message.
*/
public AmqpAnnotatedMessage getRawAmqpMessage() {
return annotatedMessage;
}
/**
* Gets the content type.
*
* @return The content type.
*/
public String getContentType() {
return annotatedMessage.getProperties().getContentType();
}
/**
* Sets the content type.
*
* @param contentType The content type.
*
* @return The updated {@link EventData}.
*/
public EventData setContentType(String contentType) {
annotatedMessage.getProperties().setContentType(contentType);
return this;
}
/**
* Gets the correlation id.
*
* @return The correlation id. {@code null} if there is none set.
*/
public String getCorrelationId() {
final AmqpMessageId messageId = annotatedMessage.getProperties().getCorrelationId();
return messageId != null ? messageId.toString() : null;
}
/**
 * Sets the correlation id on the underlying AMQP message properties.
 *
 * @param correlationId The correlation id; passing {@code null} clears the stored value.
 *
 * @return The updated {@link EventData}.
 */
public EventData setCorrelationId(String correlationId) {
    AmqpMessageId messageId = null;
    if (correlationId != null) {
        messageId = new AmqpMessageId(correlationId);
    }
    annotatedMessage.getProperties().setCorrelationId(messageId);
    return this;
}
/**
 * Gets the message id from the underlying AMQP message properties.
 *
 * @return The message id, or {@code null} if none is set.
 */
public String getMessageId() {
    final AmqpMessageId messageId = annotatedMessage.getProperties().getMessageId();
    if (messageId == null) {
        return null;
    }
    return messageId.toString();
}
/**
 * Sets the message id on the underlying AMQP message properties.
 *
 * @param messageId The message id; passing {@code null} clears the stored value.
 *
 * @return The updated {@link EventData}.
 */
public EventData setMessageId(String messageId) {
    AmqpMessageId id = null;
    if (messageId != null) {
        id = new AmqpMessageId(messageId);
    }
    annotatedMessage.getProperties().setMessageId(id);
    return this;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    // Exact-class comparison, as in the original implementation; equality is defined solely by the
    // bytes of the first AMQP data section.
    if (o == null || !getClass().equals(o.getClass())) {
        return false;
    }
    final EventData other = (EventData) o;
    return Arrays.equals(annotatedMessage.getBody().getFirstData(),
        other.annotatedMessage.getBody().getFirstData());
}
/**
 * {@inheritDoc}
 */
@Override
public int hashCode() {
    // Consistent with equals: hash only the bytes of the first AMQP data section.
    return Arrays.hashCode(this.annotatedMessage.getBody().getFirstData());
}
/**
 * A specified key-value pair of type {@link Context} carrying additional information for the event.
 *
 * @return The {@link Context} object set on the event.
 */
Context getContext() {
    return this.context;
}
/**
 * Adds a new key/value pair to the existing context on this Event Data.
 *
 * @param key The key for this context object.
 * @param value The value for this context object.
 *
 * @return The updated {@link EventData}.
 *
 * @throws NullPointerException if {@code key} or {@code value} is null.
 */
public EventData addContext(String key, Object value) {
    Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
    Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
    final Context updated = context.addData(key, value);
    this.context = updated;
    return this;
}
} |
consider not adding the target to the error as this won't provide useful information for the user. | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizeEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
for (int i = 0; i < piiTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizePiiEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
extractKeyPhrasesActionResults.add(actionResult);
}
}
final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
if (!CoreUtils.isNullOrEmpty(errors)) {
for (TextAnalyticsError error : errors) {
final String[] targetPair = parseActionErrorTarget(error.getTarget());
final String taskName = targetPair[0];
final Integer taskIndex = Integer.valueOf(targetPair[1]);
final TextAnalyticsActionResult actionResult;
if ("entityRecognitionTasks".equals(taskName)) {
actionResult = recognizeEntitiesActionResults.get(taskIndex);
} else if ("entityRecognitionPiiTasks".equals(taskName)) {
actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
} else if ("keyPhraseExtractionTasks".equals(taskName)) {
actionResult = extractKeyPhrasesActionResults.get(taskIndex);
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Invalid task name in target reference, " + taskName));
}
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
new com.azure.ai.textanalytics.models.TextAnalyticsError(
TextAnalyticsErrorCode.fromString(
error.getCode() == null ? null : error.getCode().toString()),
error.getMessage(), error.getTarget()));
}
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizeEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizePiiEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
IterableStream.of(extractKeyPhrasesActionResults));
return analyzeBatchActionsResult;
} | error.getMessage(), error.getTarget())); | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
    // (Body of the duplicated, already-fixed toAnalyzeTasks copy; its signature opens on the
    // preceding line and its closing brace follows this block. This copy already passes null as the
    // exposed error target, since the service target is an internal task reference.)
    TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
    final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
        tasksStateTasks.getEntityRecognitionPiiTasks();
    final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
        tasksStateTasks.getEntityRecognitionTasks();
    final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
        tasksStateTasks.getKeyPhraseExtractionTasks();
    List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
    List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
    List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
    // Convert each completed task item, preserving service ordering so error targets
    // (task name + index) can be matched back to the corresponding action result below.
    if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
        for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
            final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
            final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
            RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
                toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            recognizeEntitiesActionResults.add(actionResult);
        }
    }
    if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
        for (int i = 0; i < piiTasksItems.size(); i++) {
            final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
            final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
            RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
                toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            recognizePiiEntitiesActionResults.add(actionResult);
        }
    }
    if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
        for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
            final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
            final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
            ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
                toExtractKeyPhrasesResultCollection(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            extractKeyPhrasesActionResults.add(actionResult);
        }
    }
    final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
    if (!CoreUtils.isNullOrEmpty(errors)) {
        for (TextAnalyticsError error : errors) {
            // Each job-level error carries a target of the form "<taskName>/<index>".
            final String[] targetPair = parseActionErrorTarget(error.getTarget());
            final String taskName = targetPair[0];
            final Integer taskIndex = Integer.valueOf(targetPair[1]);
            final TextAnalyticsActionResult actionResult;
            if ("entityRecognitionTasks".equals(taskName)) {
                actionResult = recognizeEntitiesActionResults.get(taskIndex);
            } else if ("entityRecognitionPiiTasks".equals(taskName)) {
                actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
            } else if ("keyPhraseExtractionTasks".equals(taskName)) {
                actionResult = extractKeyPhrasesActionResults.get(taskIndex);
            } else {
                throw logger.logExceptionAsError(new RuntimeException(
                    "Invalid task name in target reference, " + taskName));
            }
            TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
            // Target intentionally null on the exposed error: the internal reference is not useful
            // to users.
            TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
                new com.azure.ai.textanalytics.models.TextAnalyticsError(
                    TextAnalyticsErrorCode.fromString(
                        error.getCode() == null ? null : error.getCode().toString()),
                    error.getMessage(), null));
        }
    }
    final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
    final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
    TextDocumentBatchStatistics batchStatistics = null;
    // Statistics are optional; only mapped when the caller requested them and the service returned
    // them.
    if (requestStatistics != null) {
        batchStatistics = new TextDocumentBatchStatistics(
            requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
            requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
        );
    }
    AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
    AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
        IterableStream.of(recognizeEntitiesActionResults));
    AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
        IterableStream.of(recognizePiiEntitiesActionResults));
    AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
        IterableStream.of(extractKeyPhrasesActionResults));
    return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
// Regex used to split an action error's 'target' reference into a task name and a task index.
// NOTE(review): the string literal below appears truncated in this extract (unterminated quote) —
// recover the actual pattern from the original source before relying on it.
private static final String REGEX_ACTION_ERROR_TARGET = "
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
// Generated service client that performs the underlying REST calls.
private final TextAnalyticsClientImpl service;
/**
 * Creates a client that wraps the generated Text Analytics service client.
 *
 * @param service The generated service client used to issue REST calls.
 */
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
    this.service = service;
}
/**
 * Starts a long-running "analyze batch actions" job and returns a poller whose final result is a
 * {@link PagedFlux} of per-page {@link AnalyzeBatchActionsResult}.
 *
 * @param documents The documents to analyze; validated before the request is built.
 * @param actions The set of actions (entity/PII/key-phrase tasks) to run.
 * @param options Optional request options; a default instance is used when null.
 * @param context Additional request context; {@code Context.NONE} is used when null.
 * @return A {@link PollerFlux} driving activation, polling, and final-result fetching; any
 *     synchronous failure is surfaced via {@link PollerFlux#error}.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
    Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
    Context context) {
    try {
        inputDocumentsValidation(documents);
        options = getNotNullAnalyzeBatchActionsOptions(options);
        final Context finalContext = getNotNullContext(context);
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            // Activation: submit the job and capture the operation id from the
            // Operation-Location response header.
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
                            new AnalyzeBatchActionsOperationDetail();
                        AnalyzeBatchActionsOperationDetailPropertiesHelper
                            .setOperationId(textAnalyticsOperationResult,
                            parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return textAnalyticsOperationResult;
                    })),
            // Polling: query job status by operation id until a terminal state is reached.
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            // Cancellation is not supported by this operation.
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            // Fetch: expose the paged results once the job completes.
            fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext)))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Synchronous-result variant of {@code beginAnalyzeBatchActions}: identical activation and polling,
 * but the final result is wrapped in a blocking {@link PagedIterable} instead of a {@link PagedFlux}.
 *
 * @param documents The documents to analyze; validated before the request is built.
 * @param actions The set of actions (entity/PII/key-phrase tasks) to run.
 * @param options Optional request options; a default instance is used when null.
 * @param context Additional request context; {@code Context.NONE} is used when null.
 * @return A {@link PollerFlux} whose fetch step yields a {@link PagedIterable} of results.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
    beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
        AnalyzeBatchActionsOptions options, Context context) {
    try {
        inputDocumentsValidation(documents);
        options = getNotNullAnalyzeBatchActionsOptions(options);
        final Context finalContext = getNotNullContext(context);
        final AnalyzeBatchInput analyzeBatchInput =
            new AnalyzeBatchInput()
                .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
                .setTasks(getJobManifestTasks(actions));
        analyzeBatchInput.setDisplayName(actions.getDisplayName());
        final boolean finalIncludeStatistics = options.isIncludeStatistics();
        return new PollerFlux<>(
            DEFAULT_POLL_INTERVAL,
            // Activation: submit the job and capture the operation id from the
            // Operation-Location response header.
            activationOperation(
                service.analyzeWithResponseAsync(analyzeBatchInput,
                    finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
                    .map(analyzeResponse -> {
                        final AnalyzeBatchActionsOperationDetail operationDetail =
                            new AnalyzeBatchActionsOperationDetail();
                        AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
                            parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
                        return operationDetail;
                    })),
            pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
                finalIncludeStatistics, null, null, finalContext)),
            // Cancellation is not supported by this operation.
            (activationResponse, pollingContext) ->
                Mono.error(new RuntimeException("Cancellation is not supported.")),
            fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
                operationId, null, null, finalIncludeStatistics, finalContext))))
        );
    } catch (RuntimeException ex) {
        return PollerFlux.error(ex);
    }
}
/**
 * Translates the user-facing {@link TextAnalyticsActions} into the service's {@link JobManifestTasks}
 * wire model. Each action category maps independently; a null options iterable maps to a null task
 * list, and a null element inside an iterable maps to a null task entry (preserved as-is).
 *
 * @param actions The user-specified actions.
 * @return The wire-model task manifest.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
    return new JobManifestTasks()
        .setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final EntitiesTask entitiesTask = new EntitiesTask();
                    entitiesTask.setParameters(
                        new EntitiesTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                    return entitiesTask;
                }).collect(Collectors.toList()))
        .setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
            : StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final PiiTask piiTask = new PiiTask();
                    // PII tasks additionally carry an optional domain filter.
                    piiTask.setParameters(
                        new PiiTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion()))
                            .setDomain(PiiTaskParametersDomain.fromString(
                                action.getDomainFilter() == null ? null
                                    : action.getDomainFilter().toString())));
                    return piiTask;
                }).collect(Collectors.toList()))
        .setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
            : StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
                action -> {
                    if (action == null) {
                        return null;
                    }
                    final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
                    keyPhrasesTask.setParameters(
                        new KeyPhrasesTaskParameters()
                            .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
                    return keyPhrasesTask;
                }).collect(Collectors.toList()));
}
/**
 * Wraps the activation call of the long-running operation so that service failures are mapped to
 * HTTP response exceptions and synchronous failures are surfaced through the returned Mono.
 *
 * @param operationResult The activation call that starts the analyze job.
 * @return A function suitable for the PollerFlux activation step.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return pollingContext -> {
        try {
            return operationResult.onErrorMap(throwable -> Utility.mapToHttpResponseExceptionIfExist(throwable));
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Wraps the status-poll step of the long-running operation: reads the operation id from the latest
 * poll response, invokes the supplied status call, and converts the service job state into a
 * {@link PollResponse}. Service failures are mapped to HTTP response exceptions.
 *
 * @param pollingFunction Retrieves the job state for a given operation id.
 * @return A function suitable for the PollerFlux polling step.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return pollingContext -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
                pollingContext.getLatestResponse();
            final String operationId = operationResultPollResponse.getValue().getOperationId();
            return pollingFunction.apply(operationId)
                .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    };
}
/**
 * Wraps the final-result fetch of the long-running operation: looks up the operation id from the
 * latest poll response and delegates to the supplied fetching function.
 *
 * @param fetchingFunction Retrieves the paged results for a given operation id.
 * @return A function suitable for the PollerFlux fetch step.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = pollingContext.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Iterable counterpart of {@code fetchingOperation}: resolves the operation id from the latest poll
 * response and delegates to the supplied fetching function that yields a {@link PagedIterable}.
 *
 * @param fetchingFunction Retrieves the iterable paged results for a given operation id.
 * @return A function suitable for the PollerFlux fetch step.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return pollingContext -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = pollingContext.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Creates a {@link PagedFlux} over the analyze-job results; each page (first and continuations) is
 * fetched lazily through {@code getPage}.
 *
 * @param operationId The analyze operation id.
 * @param top Optional page-size limit forwarded to the service.
 * @param skip Optional offset forwarded to the service.
 * @param showStats Whether to request document statistics.
 * @param context Additional request context.
 * @return The lazily-paged result stream.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        token -> getPage(token, operationId, top, skip, showStats, context));
}
/**
 * Fetches one page of analyze-job results. When a continuation token is present, the {@code $top}
 * and {@code $skip} values parsed from it override the caller-supplied {@code top}/{@code skip}.
 *
 * @param continuationToken The next-link token from the previous page, or null for the first page.
 * @param operationId The analyze operation id.
 * @param top Optional page-size limit used when no continuation token is given.
 * @param skip Optional offset used when no continuation token is given.
 * @param showStats Whether to request document statistics.
 * @param context Additional request context.
 * @return The mapped paged response; service failures are mapped to HTTP response exceptions.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    // Refactor: both branches previously duplicated the identical call chain; resolve the
    // effective paging values first, then issue a single call.
    Integer topValue = top;
    Integer skipValue = skip;
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        topValue = continuationTokenMap.getOrDefault("$top", null);
        skipValue = continuationTokenMap.getOrDefault("$skip", null);
    }
    return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Converts a raw {@link AnalyzeJobState} response into a {@link PagedResponse} containing a single
 * mapped {@link AnalyzeBatchActionsResult}, carrying the service's next-link as the continuation
 * token.
 *
 * @param response The raw service response.
 * @return The mapped paged response.
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState analyzeJobState = response.getValue();
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        // Single-element page: one AnalyzeBatchActionsResult per job-state response.
        Arrays.asList(toAnalyzeTasks(analyzeJobState)),
        analyzeJobState.getNextLink(),
        null);
}
/**
 * Converts a job-state response into a {@link PollResponse}: maps the service job status onto a
 * {@link LongRunningOperationStatus} and copies job metadata (display name, timestamps, per-action
 * counts) onto the in-flight operation detail.
 *
 * @param analyzeJobStateResponse The latest job-state response from the service.
 * @param operationResultPollResponse The previous poll response whose value is updated in place.
 * @return The new poll response carrying the mapped status.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    // Defaults to completed when the response carries no status.
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
        switch (analyzeJobStateResponse.getValue().getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown service statuses are passed through as non-terminal custom states.
                status = LongRunningOperationStatus.fromString(
                    analyzeJobStateResponse.getValue().getStatus().toString(), true);
                break;
        }
    }
    // NOTE(review): the null guard above is not applied below — getValue() is dereferenced
    // unconditionally from here on, so a null response body would throw NullPointerException.
    // Confirm whether the service can return an empty body at this point.
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getDisplayName());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getCreatedDateTime());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getExpirationDateTime());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
        analyzeJobStateResponse.getValue().getLastUpdateDateTime());
    final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
        tasksResult.getFailed());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
        tasksResult.getInProgress());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
        operationResultPollResponse.getValue(), tasksResult.getCompleted());
    AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
        tasksResult.getTotal());
    return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
// Substitutes Context.NONE when the caller supplied no context.
private Context getNotNullContext(Context context) {
    if (context == null) {
        return Context.NONE;
    }
    return context;
}
// Substitutes a default options instance when the caller supplied none.
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options == null) {
        return new AnalyzeBatchActionsOptions();
    }
    return options;
}
// Defaults the model version to "latest" when unspecified.
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion != null) {
        return modelVersion;
    }
    return "latest";
}
/**
 * Parses a service error's 'target' reference into a {task name, task index} pair using
 * {@code REGEX_ACTION_ERROR_TARGET}. (NOTE(review): that constant's literal appears truncated in
 * this extract — verify the expected format against the original source.)
 *
 * @param targetReference The target string from the job-level error.
 * @return A two-element array: index 0 is the task name, index 1 the task index as a string.
 * @throws RuntimeException if the reference is missing or does not match the expected format.
 */
private String[] parseActionErrorTarget(String targetReference) {
    if (CoreUtils.isNullOrEmpty(targetReference)) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error with a target field referencing an action but did not get one"));
    }
    final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
    final Matcher matcher = pattern.matcher(targetReference);
    String[] taskNameIdPair = new String[2];
    // Keeps the last match when multiple are present, as before.
    while (matcher.find()) {
        taskNameIdPair[0] = matcher.group(1);
        taskNameIdPair[1] = matcher.group(2);
    }
    // FIX: an unmatched reference previously returned [null, null], which surfaced later as an
    // opaque NullPointerException when the caller parsed the index. Fail fast with context instead.
    if (taskNameIdPair[0] == null || taskNameIdPair[1] == null) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Failed to parse task name and index from error target reference: " + targetReference));
    }
    return taskNameIdPair;
}
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET =
"
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
/**
 * Converts a raw {@code AnalyzeJobState} HTTP response into a single-element page of
 * {@code AnalyzeBatchActionsResult}, carrying the service's nextLink forward as the
 * continuation token.
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState jobState = response.getValue();
    final List<AnalyzeBatchActionsResult> pageItems = Arrays.asList(toAnalyzeTasks(jobState));
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        pageItems,
        jobState.getNextLink(),
        null);
}
/**
 * Maps a polled {@code AnalyzeJobState} response onto the mutable operation detail carried
 * by the poller, translating the service job status into a {@code LongRunningOperationStatus}.
 *
 * @param analyzeJobStateResponse latest status response from the service.
 * @param operationResultPollResponse poll response whose value is updated in place.
 * @return a Mono emitting the new poll response with the translated status.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    // Hoist the payload once instead of re-reading response.getValue() on every access.
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown service states are preserved as non-terminal custom statuses.
                status = LongRunningOperationStatus.fromString(jobState.getStatus().toString(), true);
                break;
        }
    }
    final AnalyzeBatchActionsOperationDetail detail = operationResultPollResponse.getValue();
    if (jobState != null) {
        // Previously these setters dereferenced the payload unconditionally and would
        // NPE on a null body even though the status switch guarded against it; only
        // populate metadata when the service actually returned a body.
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(detail, jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(detail, jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(detail, jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(detail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(detail, tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(detail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(detail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(detail, tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, detail));
}
/** Substitutes {@code Context.NONE} when the caller passed no context. */
private Context getNotNullContext(Context context) {
    if (context != null) {
        return context;
    }
    return Context.NONE;
}
/** Substitutes default options when the caller passed null. */
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options != null) {
        return options;
    }
    return new AnalyzeBatchActionsOptions();
}
/** Falls back to the service's "latest" model when no version was specified. */
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion != null) {
        return modelVersion;
    }
    return "latest";
}
/**
 * Extracts the task name and task index from an error's target reference using
 * {@code REGEX_ACTION_ERROR_TARGET}.
 *
 * @param targetReference the target field of a service error; must be non-empty.
 * @return a two-element array: [0] task name, [1] task index (as a string).
 * @throws RuntimeException if the target is missing or does not match the expected pattern.
 */
private String[] parseActionErrorTarget(String targetReference) {
    if (CoreUtils.isNullOrEmpty(targetReference)) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error with a target field referencing an action but did not get one"));
    }
    final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
    final Matcher matcher = pattern.matcher(targetReference);
    String[] taskNameIdPair = new String[2];
    while (matcher.find()) {
        taskNameIdPair[0] = matcher.group(1);
        taskNameIdPair[1] = matcher.group(2);
    }
    // Fail fast with a clear message instead of returning [null, null] and letting the
    // caller hit a NumberFormatException on Integer.valueOf(null) later on.
    if (taskNameIdPair[0] == null || taskNameIdPair[1] == null) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error target that references an action, but found: " + targetReference));
    }
    return taskNameIdPair;
}
} |
I think it is not worth it; String[] should be good enough. | private String[] parseActionErrorTarget(String targetReference) {
if (CoreUtils.isNullOrEmpty(targetReference)) {
throw logger.logExceptionAsError(new RuntimeException(
"Expected an error with a target field referencing an action but did not get one"));
}
final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
final Matcher matcher = pattern.matcher(targetReference);
String[] taskNameIdPair = new String[2];
while (matcher.find()) {
taskNameIdPair[0] = matcher.group(1);
taskNameIdPair[1] = matcher.group(2);
}
return taskNameIdPair;
} | String[] taskNameIdPair = new String[2]; | private String[] parseActionErrorTarget(String targetReference) {
if (CoreUtils.isNullOrEmpty(targetReference)) {
throw logger.logExceptionAsError(new RuntimeException(
"Expected an error with a target field referencing an action but did not get one"));
}
final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
final Matcher matcher = pattern.matcher(targetReference);
String[] taskNameIdPair = new String[2];
while (matcher.find()) {
taskNameIdPair[0] = matcher.group(1);
taskNameIdPair[1] = matcher.group(2);
}
return taskNameIdPair;
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET = "
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
 * Creates an async client for the analyze-batch-actions operations, backed by the
 * auto-generated Text Analytics service implementation.
 *
 * @param service the generated service stub; assumed non-null — TODO confirm callers validate.
 */
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
/**
 * Submits a batch of documents with the configured set of actions and returns a poller
 * whose final result is a {@code PagedFlux} of per-page action results.
 *
 * @param documents input documents; validated by inputDocumentsValidation.
 * @param actions actions to run; the display name is forwarded to the service.
 * @param options optional call options; defaults are substituted when null.
 * @param context pipeline context; Context.NONE is substituted when null.
 * @return a PollerFlux that activates the job, polls its status, and fetches results.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
// Request payload: the documents plus one task manifest per configured action.
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job and capture the operation id parsed from the
// Operation-Location header of the service response.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
// Poll: hit the status endpoint for the captured operation id.
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// Cancellation is not offered by this endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Fetch: expose results as a PagedFlux once the operation completes.
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Synchronous-surface variant of beginAnalyzeBatchActions: identical activation and
 * polling, but the final result is wrapped as a {@code PagedIterable}.
 *
 * @param documents input documents; validated by inputDocumentsValidation.
 * @param actions actions to run; the display name is forwarded to the service.
 * @param options optional call options; defaults are substituted when null.
 * @param context pipeline context; Context.NONE is substituted when null.
 * @return a PollerFlux whose final result is a PagedIterable of action results.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
// Request payload: the documents plus one task manifest per configured action.
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job and capture the operation id from the
// Operation-Location header.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
// Poll: hit the status endpoint for the captured operation id.
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// Cancellation is not offered by this endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Fetch: wrap the PagedFlux in a blocking PagedIterable for the sync surface.
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Translates the user-facing {@code TextAnalyticsActions} into the generated
 * {@code JobManifestTasks} wire model, one task list per action category.
 * A null action iterable maps to a null task list; a null element inside an
 * iterable is preserved as a null task entry.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
// Model version falls back to "latest" when unspecified.
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
// Optional PII domain filter; null when the caller set none.
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
/**
 * Wraps the activation call of the poller: the pre-built activation Mono is returned
 * with service errors remapped; any synchronous failure becomes a logged error Mono.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignoredContext -> {
        try {
            return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the poll step: looks up the operation ID from the latest poll response, invokes
 * the status endpoint, and folds the service state back into the poll response.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return context -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> latest = context.getLatestResponse();
            return pollingFunction.apply(latest.getValue().getOperationId())
                .flatMap(statusResponse -> processAnalyzedModelResponse(statusResponse, latest))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the final-result retrieval step of the long-running operation: reads the
 * operation ID off the latest poll response and delegates to the supplied fetcher.
 * A synchronous RuntimeException is surfaced as a logged error Mono rather than
 * being thrown at the poller.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = context.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Final-result step for the Iterable-based poller: resolves the operation ID from the
 * latest poll response and hands it to the supplied fetcher; synchronous failures are
 * logged and returned as an error Mono.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = context.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Exposes the operation's results as a {@code PagedFlux}: the first retrieval passes a
 * null continuation token, later retrievals pass the token returned with each page.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        nextLink -> getPage(nextLink, operationId, top, skip, showStats, context));
}
/**
 * Retrieves one page of analyze-action results from the status endpoint.
 *
 * When a continuation token (the service's nextLink) is present, its embedded paging
 * parameters ({@code $top}/{@code $skip}) override the caller-supplied values.
 *
 * @param continuationToken nextLink from the previous page, or null for the first page.
 * @param operationId identifier of the analyze operation being polled.
 * @param top maximum results per page; may be null for the service default.
 * @param skip number of results to skip; may be null.
 * @param showStats whether to request statistics from the service.
 * @param context additional context propagated through the HTTP pipeline.
 * @return a Mono emitting the mapped page of results.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    // The original duplicated the whole call chain in both branches; resolve the
    // effective paging parameters first, then issue a single request.
    Integer effectiveTop = top;
    Integer effectiveSkip = skip;
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        effectiveTop = continuationTokenMap.getOrDefault("$top", null);
        effectiveSkip = continuationTokenMap.getOrDefault("$skip", null);
    }
    return service.analyzeStatusWithResponseAsync(operationId, showStats, effectiveTop, effectiveSkip, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Converts a raw {@code AnalyzeJobState} HTTP response into a single-element page of
 * {@code AnalyzeBatchActionsResult}, carrying the service's nextLink forward as the
 * continuation token.
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState jobState = response.getValue();
    final List<AnalyzeBatchActionsResult> pageItems = Arrays.asList(toAnalyzeTasks(jobState));
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        pageItems,
        jobState.getNextLink(),
        null);
}
/**
 * Converts the service's {@code AnalyzeJobState} into a user-facing
 * {@code AnalyzeBatchActionsResult}: one action-result entry per task (in service
 * order), with job-level errors attached to the specific action each error targets.
 */
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
// Map each entity-recognition task item to its action result, preserving index order
// (error targets below refer to these positions).
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizeEntitiesActionResults.add(actionResult);
}
}
// Same mapping for PII-entity tasks.
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
for (int i = 0; i < piiTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizePiiEntitiesActionResults.add(actionResult);
}
}
// Same mapping for key-phrase tasks.
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
extractKeyPhrasesActionResults.add(actionResult);
}
}
// Attach each job-level error to the action result its target reference points at
// (task-list name plus index, parsed by parseActionErrorTarget).
final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
if (!CoreUtils.isNullOrEmpty(errors)) {
for (TextAnalyticsError error : errors) {
final String[] targetPair = parseActionErrorTarget(error.getTarget());
final String taskName = targetPair[0];
final Integer taskIndex = Integer.valueOf(targetPair[1]);
final TextAnalyticsActionResult actionResult;
if ("entityRecognitionTasks".equals(taskName)) {
actionResult = recognizeEntitiesActionResults.get(taskIndex);
} else if ("entityRecognitionPiiTasks".equals(taskName)) {
actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
} else if ("keyPhraseExtractionTasks".equals(taskName)) {
actionResult = extractKeyPhrasesActionResults.get(taskIndex);
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Invalid task name in target reference, " + taskName));
}
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
// Fully-qualified to disambiguate from the generated wire-model TextAnalyticsError.
new com.azure.ai.textanalytics.models.TextAnalyticsError(
TextAnalyticsErrorCode.fromString(
error.getCode() == null ? null : error.getCode().toString()),
error.getMessage(), error.getTarget()));
}
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
// Statistics are optional on the wire; leave null when absent.
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizeEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizePiiEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
IterableStream.of(extractKeyPhrasesActionResults));
return analyzeBatchActionsResult;
}
/**
 * Maps a polled {@code AnalyzeJobState} response onto the mutable operation detail carried
 * by the poller, translating the service job status into a {@code LongRunningOperationStatus}.
 *
 * @param analyzeJobStateResponse latest status response from the service.
 * @param operationResultPollResponse poll response whose value is updated in place.
 * @return a Mono emitting the new poll response with the translated status.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    // Hoist the payload once instead of re-reading response.getValue() on every access.
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown service states are preserved as non-terminal custom statuses.
                status = LongRunningOperationStatus.fromString(jobState.getStatus().toString(), true);
                break;
        }
    }
    final AnalyzeBatchActionsOperationDetail detail = operationResultPollResponse.getValue();
    if (jobState != null) {
        // Previously these setters dereferenced the payload unconditionally and would
        // NPE on a null body even though the status switch guarded against it; only
        // populate metadata when the service actually returned a body.
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(detail, jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(detail, jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(detail, jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(detail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(detail, tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(detail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(detail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(detail, tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, detail));
}
/** Substitutes {@code Context.NONE} when the caller passed no context. */
private Context getNotNullContext(Context context) {
    if (context != null) {
        return context;
    }
    return Context.NONE;
}
/** Substitutes default options when the caller passed null. */
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options != null) {
        return options;
    }
    return new AnalyzeBatchActionsOptions();
}
/** Falls back to the service's "latest" model when no version was specified. */
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion != null) {
        return modelVersion;
    }
    return "latest";
}
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET =
"
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
 * Creates an async client for the analyze-batch-actions operations, backed by the
 * auto-generated Text Analytics service implementation.
 *
 * @param service the generated service stub; assumed non-null — TODO confirm callers validate.
 */
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
/**
 * Submits a batch of documents with the configured set of actions and returns a poller
 * whose final result is a {@code PagedFlux} of per-page action results.
 *
 * @param documents input documents; validated by inputDocumentsValidation.
 * @param actions actions to run; the display name is forwarded to the service.
 * @param options optional call options; defaults are substituted when null.
 * @param context pipeline context; Context.NONE is substituted when null.
 * @return a PollerFlux that activates the job, polls its status, and fetches results.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
// Request payload: the documents plus one task manifest per configured action.
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job and capture the operation id parsed from the
// Operation-Location header of the service response.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
// Poll: hit the status endpoint for the captured operation id.
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// Cancellation is not offered by this endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Fetch: expose results as a PagedFlux once the operation completes.
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Synchronous-surface variant of beginAnalyzeBatchActions: identical activation and
 * polling, but the final result is wrapped as a {@code PagedIterable}.
 *
 * @param documents input documents; validated by inputDocumentsValidation.
 * @param actions actions to run; the display name is forwarded to the service.
 * @param options optional call options; defaults are substituted when null.
 * @param context pipeline context; Context.NONE is substituted when null.
 * @return a PollerFlux whose final result is a PagedIterable of action results.
 */
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
// Request payload: the documents plus one task manifest per configured action.
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
// Activation: submit the job and capture the operation id from the
// Operation-Location header.
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
// Poll: hit the status endpoint for the captured operation id.
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
// Cancellation is not offered by this endpoint.
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
// Fetch: wrap the PagedFlux in a blocking PagedIterable for the sync surface.
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Translates the user-facing {@code TextAnalyticsActions} into the generated
 * {@code JobManifestTasks} wire model, one task list per action category.
 * A null action iterable maps to a null task list; a null element inside an
 * iterable is preserved as a null task entry.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
// Model version falls back to "latest" when unspecified.
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
// Optional PII domain filter; null when the caller set none.
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
/**
 * Wraps the activation call of the poller: the pre-built activation Mono is returned
 * with service errors remapped; any synchronous failure becomes a logged error Mono.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
    activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignoredContext -> {
        try {
            return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the poll step: looks up the operation ID from the latest poll response, invokes
 * the status endpoint, and folds the service state back into the poll response.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
    pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return context -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> latest = context.getLatestResponse();
            return pollingFunction.apply(latest.getValue().getOperationId())
                .flatMap(statusResponse -> processAnalyzedModelResponse(statusResponse, latest))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the final-result retrieval step of the long-running operation: reads the
 * operation ID off the latest poll response and delegates to the supplied fetcher.
 * A synchronous RuntimeException is surfaced as a logged error Mono rather than
 * being thrown at the poller.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
    fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = context.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Final-result step for the Iterable-based poller: resolves the operation ID from the
 * latest poll response and hands it to the supplied fetcher; synchronous failures are
 * logged and returned as an error Mono.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
    fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            final AnalyzeBatchActionsOperationDetail detail = context.getLatestResponse().getValue();
            return fetchingFunction.apply(detail.getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Exposes the operation's results as a {@code PagedFlux}: the first retrieval passes a
 * null continuation token, later retrievals pass the token returned with each page.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        nextLink -> getPage(nextLink, operationId, top, skip, showStats, context));
}
/**
 * Retrieves one page of analyze-action results from the status endpoint.
 *
 * When a continuation token (the service's nextLink) is present, its embedded paging
 * parameters ({@code $top}/{@code $skip}) override the caller-supplied values.
 *
 * @param continuationToken nextLink from the previous page, or null for the first page.
 * @param operationId identifier of the analyze operation being polled.
 * @param top maximum results per page; may be null for the service default.
 * @param skip number of results to skip; may be null.
 * @param showStats whether to request statistics from the service.
 * @param context additional context propagated through the HTTP pipeline.
 * @return a Mono emitting the mapped page of results.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    // The original duplicated the whole call chain in both branches; resolve the
    // effective paging parameters first, then issue a single request.
    Integer effectiveTop = top;
    Integer effectiveSkip = skip;
    if (continuationToken != null) {
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        effectiveTop = continuationTokenMap.getOrDefault("$top", null);
        effectiveSkip = continuationTokenMap.getOrDefault("$skip", null);
    }
    return service.analyzeStatusWithResponseAsync(operationId, showStats, effectiveTop, effectiveSkip, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Converts a raw {@code AnalyzeJobState} HTTP response into a single-element page of
 * {@code AnalyzeBatchActionsResult}, carrying the service's nextLink forward as the
 * continuation token.
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState jobState = response.getValue();
    final List<AnalyzeBatchActionsResult> pageItems = Arrays.asList(toAnalyzeTasks(jobState));
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        pageItems,
        jobState.getNextLink(),
        null);
}
private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
    // Converts the service-side job state into the public AnalyzeBatchActionsResult model:
    // one action-result list per supported task type, attribution of job-level errors to the
    // action referenced by the error's "target" field, and optional batch statistics.
    final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
    final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
        tasksStateTasks.getEntityRecognitionPiiTasks();
    final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
        tasksStateTasks.getEntityRecognitionTasks();
    final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
        tasksStateTasks.getKeyPhraseExtractionTasks();
    final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
    final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
    final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
    // The original indexed loops never used the index beyond element access; enhanced-for is clearer.
    if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
        for (TasksStateTasksEntityRecognitionTasksItem taskItem : entityRecognitionTasksItems) {
            final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
            RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
                toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            recognizeEntitiesActionResults.add(actionResult);
        }
    }
    if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
        for (TasksStateTasksEntityRecognitionPiiTasksItem taskItem : piiTasksItems) {
            final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
            RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
                toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            recognizePiiEntitiesActionResults.add(actionResult);
        }
    }
    if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
        for (TasksStateTasksKeyPhraseExtractionTasksItem taskItem : keyPhraseExtractionTasks) {
            final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
            ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
                toExtractKeyPhrasesResultCollection(taskItem.getResults()));
            TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
                taskItem.getLastUpdateDateTime());
            extractKeyPhrasesActionResults.add(actionResult);
        }
    }
    final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
    if (!CoreUtils.isNullOrEmpty(errors)) {
        // NOTE(review): errors are attached at the parsed per-task-type index; confirm that this
        // index ordering matches the ordering of the caller's input actions list.
        for (TextAnalyticsError error : errors) {
            final String[] targetPair = parseActionErrorTarget(error.getTarget());
            final String taskName = targetPair[0];
            // parseActionErrorTarget leaves nulls behind when the target reference does not match
            // its pattern; fail with a descriptive message instead of an opaque NPE at parse time.
            if (taskName == null || targetPair[1] == null) {
                throw logger.logExceptionAsError(new RuntimeException(
                    "Invalid task name in target reference, " + error.getTarget()));
            }
            final int taskIndex = Integer.parseInt(targetPair[1]);
            final TextAnalyticsActionResult actionResult;
            if ("entityRecognitionTasks".equals(taskName)) {
                actionResult = recognizeEntitiesActionResults.get(taskIndex);
            } else if ("entityRecognitionPiiTasks".equals(taskName)) {
                actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
            } else if ("keyPhraseExtractionTasks".equals(taskName)) {
                actionResult = extractKeyPhrasesActionResults.get(taskIndex);
            } else {
                throw logger.logExceptionAsError(new RuntimeException(
                    "Invalid task name in target reference, " + taskName));
            }
            TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
            TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
                new com.azure.ai.textanalytics.models.TextAnalyticsError(
                    TextAnalyticsErrorCode.fromString(
                        error.getCode() == null ? null : error.getCode().toString()),
                    error.getMessage(), null));
        }
    }
    final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
    final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
    TextDocumentBatchStatistics batchStatistics = null;
    if (requestStatistics != null) {
        batchStatistics = new TextDocumentBatchStatistics(
            requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
            requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount());
    }
    AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
    AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
        IterableStream.of(recognizeEntitiesActionResults));
    AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
        IterableStream.of(recognizePiiEntitiesActionResults));
    AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
        IterableStream.of(extractKeyPhrasesActionResults));
    return analyzeBatchActionsResult;
}
/**
 * Translates one poll of the analyze-status endpoint into a {@code PollResponse}: maps the
 * service-reported job status onto a long-running-operation status and copies the job metadata
 * (display name, timestamps, per-action counters) onto the operation detail held by the poller.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
// Default to completed; refined below when the response actually carries a status.
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
// Unrecognized service statuses are passed through verbatim as non-terminal states.
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
// NOTE(review): getValue() is dereferenced unconditionally below even though the guard above
// allows it to be null — potential NPE if the service returns an empty body; confirm.
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
// Per-action progress counters reported by the service for this job.
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
    // Substitute the empty Context.NONE sentinel when the caller passed no context.
    if (context != null) {
        return context;
    }
    return Context.NONE;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    // Callers may pass null options; fall back to a default-constructed instance.
    if (options != null) {
        return options;
    }
    return new AnalyzeBatchActionsOptions();
}
private String getNotNullModelVersion(String modelVersion) {
    // A null model version is sent to the service as the literal "latest".
    if (modelVersion != null) {
        return modelVersion;
    }
    return "latest";
}
} |
I think this logic does not insert the errors in a sorted way with respect to the ordering of the input actions list, right? | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizeEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
for (int i = 0; i < piiTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizePiiEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
extractKeyPhrasesActionResults.add(actionResult);
}
}
final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
if (!CoreUtils.isNullOrEmpty(errors)) {
for (TextAnalyticsError error : errors) {
final String[] targetPair = parseActionErrorTarget(error.getTarget());
final String taskName = targetPair[0];
final Integer taskIndex = Integer.valueOf(targetPair[1]);
final TextAnalyticsActionResult actionResult;
if ("entityRecognitionTasks".equals(taskName)) {
actionResult = recognizeEntitiesActionResults.get(taskIndex);
} else if ("entityRecognitionPiiTasks".equals(taskName)) {
actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
} else if ("keyPhraseExtractionTasks".equals(taskName)) {
actionResult = extractKeyPhrasesActionResults.get(taskIndex);
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Invalid task name in target reference, " + taskName));
}
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
new com.azure.ai.textanalytics.models.TextAnalyticsError(
TextAnalyticsErrorCode.fromString(
error.getCode() == null ? null : error.getCode().toString()),
error.getMessage(), null));
}
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizeEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizePiiEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
IterableStream.of(extractKeyPhrasesActionResults));
return analyzeBatchActionsResult;
} | TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); | private AnalyzeBatchActionsResult toAnalyzeTasks(AnalyzeJobState analyzeJobState) {
TasksStateTasks tasksStateTasks = analyzeJobState.getTasks();
final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems =
tasksStateTasks.getEntityRecognitionPiiTasks();
final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems =
tasksStateTasks.getEntityRecognitionTasks();
final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks =
tasksStateTasks.getKeyPhraseExtractionTasks();
List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>();
List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>();
List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>();
if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) {
for (int i = 0; i < entityRecognitionTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i);
final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizeEntitiesResultCollectionResponse(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizeEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(piiTasksItems)) {
for (int i = 0; i < piiTasksItems.size(); i++) {
final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i);
final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setResult(actionResult,
toRecognizePiiEntitiesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
recognizePiiEntitiesActionResults.add(actionResult);
}
}
if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) {
for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) {
final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i);
final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setResult(actionResult,
toExtractKeyPhrasesResultCollection(taskItem.getResults()));
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult,
taskItem.getLastUpdateDateTime());
extractKeyPhrasesActionResults.add(actionResult);
}
}
final List<TextAnalyticsError> errors = analyzeJobState.getErrors();
if (!CoreUtils.isNullOrEmpty(errors)) {
for (TextAnalyticsError error : errors) {
final String[] targetPair = parseActionErrorTarget(error.getTarget());
final String taskName = targetPair[0];
final Integer taskIndex = Integer.valueOf(targetPair[1]);
final TextAnalyticsActionResult actionResult;
if ("entityRecognitionTasks".equals(taskName)) {
actionResult = recognizeEntitiesActionResults.get(taskIndex);
} else if ("entityRecognitionPiiTasks".equals(taskName)) {
actionResult = recognizePiiEntitiesActionResults.get(taskIndex);
} else if ("keyPhraseExtractionTasks".equals(taskName)) {
actionResult = extractKeyPhrasesActionResults.get(taskIndex);
} else {
throw logger.logExceptionAsError(new RuntimeException(
"Invalid task name in target reference, " + taskName));
}
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult,
new com.azure.ai.textanalytics.models.TextAnalyticsError(
TextAnalyticsErrorCode.fromString(
error.getCode() == null ? null : error.getCode().toString()),
error.getMessage(), null));
}
}
final AnalyzeBatchActionsResult analyzeBatchActionsResult = new AnalyzeBatchActionsResult();
final RequestStatistics requestStatistics = analyzeJobState.getStatistics();
TextDocumentBatchStatistics batchStatistics = null;
if (requestStatistics != null) {
batchStatistics = new TextDocumentBatchStatistics(
requestStatistics.getDocumentsCount(), requestStatistics.getErroneousDocumentsCount(),
requestStatistics.getValidDocumentsCount(), requestStatistics.getTransactionsCount()
);
}
AnalyzeBatchActionsResultPropertiesHelper.setStatistics(analyzeBatchActionsResult, batchStatistics);
AnalyzeBatchActionsResultPropertiesHelper.setRecognizeEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizeEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setRecognizePiiEntitiesActionResults(analyzeBatchActionsResult,
IterableStream.of(recognizePiiEntitiesActionResults));
AnalyzeBatchActionsResultPropertiesHelper.setExtractKeyPhrasesActionResults(analyzeBatchActionsResult,
IterableStream.of(extractKeyPhrasesActionResults));
return analyzeBatchActionsResult;
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET = "
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Builds the service-side JobManifestTasks payload from the user-facing TextAnalyticsActions:
 * one task entry per configured action of each supported type. A null options iterable for a
 * type leaves that task list null; a null element is preserved as a null task entry.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
return new JobManifestTasks()
// Entity-recognition actions -> EntitiesTask (model version only).
.setEntityRecognitionTasks(actions.getRecognizeEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizeEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final EntitiesTask entitiesTask = new EntitiesTask();
entitiesTask.setParameters(
new EntitiesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return entitiesTask;
}).collect(Collectors.toList()))
// PII actions -> PiiTask (model version plus optional domain filter).
.setEntityRecognitionPiiTasks(actions.getRecognizePiiEntitiesOptions() == null ? null
: StreamSupport.stream(actions.getRecognizePiiEntitiesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final PiiTask piiTask = new PiiTask();
piiTask.setParameters(
new PiiTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion()))
.setDomain(PiiTaskParametersDomain.fromString(
action.getDomainFilter() == null ? null
: action.getDomainFilter().toString())));
return piiTask;
}).collect(Collectors.toList()))
// Key-phrase actions -> KeyPhrasesTask (model version only).
.setKeyPhraseExtractionTasks(actions.getExtractKeyPhrasesOptions() == null ? null
: StreamSupport.stream(actions.getExtractKeyPhrasesOptions().spliterator(), false).map(
action -> {
if (action == null) {
return null;
}
final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
keyPhrasesTask.setParameters(
new KeyPhrasesTaskParameters()
.setModelVersion(getNotNullModelVersion(action.getModelVersion())));
return keyPhrasesTask;
}).collect(Collectors.toList()));
}
/**
 * Wraps the already-built activation Mono as the poller's activation step, mapping service
 * errors to HttpResponseException where applicable. The polling context is not consulted.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
return pollingContext -> {
try {
return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
// Synchronous failures are surfaced through the returned Mono rather than thrown.
return monoError(logger, ex);
}
};
}
/**
 * Builds the poller's polling step: extracts the operation id recorded by the previous poll,
 * queries job status via the supplied function, and folds the response into the next
 * PollResponse via processAnalyzedModelResponse.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
return pollingContext -> {
try {
final PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse =
pollingContext.getLatestResponse();
final String operationId = operationResultPollResponse.getValue().getOperationId();
return pollingFunction.apply(operationId)
.flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse))
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} catch (RuntimeException ex) {
// Synchronous failures are surfaced through the returned Mono rather than thrown.
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
return pollingContext -> {
try {
final String operationId = pollingContext.getLatestResponse().getValue().getOperationId();
return fetchingFunction.apply(operationId);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
};
}
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
boolean showStats, Context context) {
return new PagedFlux<>(
() -> getPage(null, operationId, top, skip, showStats, context),
continuationToken -> getPage(continuationToken, operationId, top, skip, showStats, context));
}
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
Integer skip, boolean showStats, Context context) {
if (continuationToken != null) {
final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
final Integer topValue = continuationTokenMap.getOrDefault("$top", null);
final Integer skipValue = continuationTokenMap.getOrDefault("$skip", null);
return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
} else {
return service.analyzeStatusWithResponseAsync(operationId, showStats, top, skip, context)
.map(this::toAnalyzeTasksPagedResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
}
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
final AnalyzeJobState analyzeJobState = response.getValue();
return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
response.getRequest(),
response.getStatusCode(),
response.getHeaders(),
Arrays.asList(toAnalyzeTasks(analyzeJobState)),
analyzeJobState.getNextLink(),
null);
}
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
Response<AnalyzeJobState> analyzeJobStateResponse,
PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) {
switch (analyzeJobStateResponse.getValue().getStatus()) {
case NOT_STARTED:
case RUNNING:
status = LongRunningOperationStatus.IN_PROGRESS;
break;
case SUCCEEDED:
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
break;
case CANCELLED:
status = LongRunningOperationStatus.USER_CANCELLED;
break;
default:
status = LongRunningOperationStatus.fromString(
analyzeJobStateResponse.getValue().getStatus().toString(), true);
break;
}
}
AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getDisplayName());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getCreatedDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getExpirationDateTime());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(),
analyzeJobStateResponse.getValue().getLastUpdateDateTime());
final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(),
tasksResult.getFailed());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(),
tasksResult.getInProgress());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(
operationResultPollResponse.getValue(), tasksResult.getCompleted());
AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(),
tasksResult.getTotal());
return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue()));
}
private Context getNotNullContext(Context context) {
return context == null ? Context.NONE : context;
}
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
return options == null ? new AnalyzeBatchActionsOptions() : options;
}
private String getNotNullModelVersion(String modelVersion) {
return modelVersion == null ? "latest" : modelVersion;
}
private String[] parseActionErrorTarget(String targetReference) {
    // Extracts the {taskName, taskIndex} pair out of an error "target" reference using
    // REGEX_ACTION_ERROR_TARGET; group(1) is the task-type name, group(2) the list index.
    if (CoreUtils.isNullOrEmpty(targetReference)) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error with a target field referencing an action but did not get one"));
    }
    // NOTE: the regex is a constant, so the compiled Pattern would ideally be cached in a
    // static final field; kept local here to avoid widening this change beyond the method.
    final Pattern pattern = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE);
    final Matcher matcher = pattern.matcher(targetReference);
    final String[] taskNameIdPair = new String[2];
    while (matcher.find()) {
        taskNameIdPair[0] = matcher.group(1);
        taskNameIdPair[1] = matcher.group(2);
    }
    // Previously an unmatched reference fell through and returned {null, null}, surfacing later
    // as an opaque NullPointerException at Integer parsing; fail fast with a clear message.
    if (taskNameIdPair[0] == null || taskNameIdPair[1] == null) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Invalid task reference in target, " + targetReference));
    }
    return taskNameIdPair;
}
} | class AnalyzeBatchActionsAsyncClient {
private static final String REGEX_ACTION_ERROR_TARGET =
"
private final ClientLogger logger = new ClientLogger(AnalyzeBatchActionsAsyncClient.class);
private final TextAnalyticsClientImpl service;
AnalyzeBatchActionsAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedFlux<AnalyzeBatchActionsResult>> beginAnalyzeBatchActions(
Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeBatchActionsOptions options,
Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail textAnalyticsOperationResult =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper
.setOperationId(textAnalyticsOperationResult,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return textAnalyticsOperationResult;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext)))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
PollerFlux<AnalyzeBatchActionsOperationDetail, PagedIterable<AnalyzeBatchActionsResult>>
beginAnalyzeBatchActionsIterable(Iterable<TextDocumentInput> documents, TextAnalyticsActions actions,
AnalyzeBatchActionsOptions options, Context context) {
try {
inputDocumentsValidation(documents);
options = getNotNullAnalyzeBatchActionsOptions(options);
final Context finalContext = getNotNullContext(context);
final AnalyzeBatchInput analyzeBatchInput =
new AnalyzeBatchInput()
.setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)))
.setTasks(getJobManifestTasks(actions));
analyzeBatchInput.setDisplayName(actions.getDisplayName());
final boolean finalIncludeStatistics = options.isIncludeStatistics();
return new PollerFlux<>(
DEFAULT_POLL_INTERVAL,
activationOperation(
service.analyzeWithResponseAsync(analyzeBatchInput,
finalContext.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.map(analyzeResponse -> {
final AnalyzeBatchActionsOperationDetail operationDetail =
new AnalyzeBatchActionsOperationDetail();
AnalyzeBatchActionsOperationDetailPropertiesHelper.setOperationId(operationDetail,
parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation()));
return operationDetail;
})),
pollingOperation(operationId -> service.analyzeStatusWithResponseAsync(operationId,
finalIncludeStatistics, null, null, finalContext)),
(activationResponse, pollingContext) ->
Mono.error(new RuntimeException("Cancellation is not supported.")),
fetchingOperationIterable(operationId -> Mono.just(new PagedIterable<>(getAnalyzeOperationFluxPage(
operationId, null, null, finalIncludeStatistics, finalContext))))
);
} catch (RuntimeException ex) {
return PollerFlux.error(ex);
}
}
/**
 * Converts the user-facing {@link TextAnalyticsActions} into the service's {@link JobManifestTasks}
 * wire model. A {@code null} action collection maps to a {@code null} task list (so the payload
 * omits task types the caller did not request), and a {@code null} action element maps to a
 * {@code null} task element, matching the previous behavior.
 *
 * @param actions The batch actions configured by the caller.
 * @return The wire-model task manifest to submit with the analyze request.
 */
private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) {
    return new JobManifestTasks()
        .setEntityRecognitionTasks(mapActions(actions.getRecognizeEntitiesOptions(), action -> {
            final EntitiesTask entitiesTask = new EntitiesTask();
            entitiesTask.setParameters(
                new EntitiesTaskParameters()
                    .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
            return entitiesTask;
        }))
        .setEntityRecognitionPiiTasks(mapActions(actions.getRecognizePiiEntitiesOptions(), action -> {
            final PiiTask piiTask = new PiiTask();
            piiTask.setParameters(
                new PiiTaskParameters()
                    .setModelVersion(getNotNullModelVersion(action.getModelVersion()))
                    // The domain filter is optional; a null filter is forwarded as a null domain.
                    .setDomain(PiiTaskParametersDomain.fromString(
                        action.getDomainFilter() == null ? null
                            : action.getDomainFilter().toString())));
            return piiTask;
        }))
        .setKeyPhraseExtractionTasks(mapActions(actions.getExtractKeyPhrasesOptions(), action -> {
            final KeyPhrasesTask keyPhrasesTask = new KeyPhrasesTask();
            keyPhrasesTask.setParameters(
                new KeyPhrasesTaskParameters()
                    .setModelVersion(getNotNullModelVersion(action.getModelVersion())));
            return keyPhrasesTask;
        }));
}

/**
 * Maps each action option to its wire-model task. Shared by all task types to avoid triplicating
 * the null-tolerant stream boilerplate.
 *
 * @param actionOptions The caller-supplied options; {@code null} yields a {@code null} list.
 * @param toTask Converter applied to each non-null element; {@code null} elements stay {@code null}.
 * @return The converted task list, or {@code null} when no options were supplied.
 */
private <A, T> List<T> mapActions(Iterable<A> actionOptions, Function<A, T> toTask) {
    if (actionOptions == null) {
        return null;
    }
    return StreamSupport.stream(actionOptions.spliterator(), false)
        .map(action -> action == null ? null : toTask.apply(action))
        .collect(Collectors.toList());
}
/**
 * Builds the poller's activation step, which kicks off the long-running analyze job.
 *
 * @param operationResult The deferred submission result carrying the new operation's detail.
 * @return An activation function; service failures are mapped to {@code HttpResponseException}
 * and synchronous failures are surfaced through the returned {@code Mono}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<AnalyzeBatchActionsOperationDetail>>
activationOperation(Mono<AnalyzeBatchActionsOperationDetail> operationResult) {
    return ignoredContext -> {
        try {
            // Normalize service errors so callers always observe the SDK's exception type.
            return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the poller's status-check step: reads the operation id from the latest poll response,
 * fetches the job state, and translates it into a {@link PollResponse}.
 *
 * @param pollingFunction Service call that retrieves the job state for an operation id.
 * @return A polling function with service errors mapped to {@code HttpResponseException}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PollResponse<AnalyzeBatchActionsOperationDetail>>>
pollingOperation(Function<String, Mono<Response<AnalyzeJobState>>> pollingFunction) {
    return context -> {
        try {
            final PollResponse<AnalyzeBatchActionsOperationDetail> lastPollResponse = context.getLatestResponse();
            final String operationId = lastPollResponse.getValue().getOperationId();
            return pollingFunction.apply(operationId)
                .flatMap(jobStateResponse -> processAnalyzedModelResponse(jobStateResponse, lastPollResponse))
                .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the poller's final-result step for the async surface: resolves the operation id from the
 * latest poll response and hands it to the supplied fetcher, which produces the result pages.
 *
 * @param fetchingFunction Produces the {@link PagedFlux} of results for an operation id.
 * @return A fetching function; synchronous failures are surfaced through the returned {@code Mono}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedFlux<AnalyzeBatchActionsResult>>>
fetchingOperation(Function<String, Mono<PagedFlux<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            return fetchingFunction.apply(context.getLatestResponse().getValue().getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Builds the poller's final-result step for the sync surface: same as
 * {@code fetchingOperation}, but the fetcher yields a blocking {@link PagedIterable}.
 *
 * @param fetchingFunction Produces the {@link PagedIterable} of results for an operation id.
 * @return A fetching function; synchronous failures are surfaced through the returned {@code Mono}.
 */
private Function<PollingContext<AnalyzeBatchActionsOperationDetail>, Mono<PagedIterable<AnalyzeBatchActionsResult>>>
fetchingOperationIterable(Function<String, Mono<PagedIterable<AnalyzeBatchActionsResult>>> fetchingFunction) {
    return context -> {
        try {
            return fetchingFunction.apply(context.getLatestResponse().getValue().getOperationId());
        } catch (RuntimeException runtimeException) {
            return monoError(logger, runtimeException);
        }
    };
}
/**
 * Creates a {@link PagedFlux} over the analyze operation's result pages. The first page is
 * requested with a {@code null} continuation token; subsequent pages pass the token returned
 * by the previous page so {@code getPage} can resolve paging from it.
 *
 * @param operationId The long-running operation's id.
 * @param top Maximum number of items per page, or {@code null} for the service default.
 * @param skip Number of items to skip, or {@code null} for none.
 * @param showStats Whether document/request statistics are included in the response.
 * @param context Pipeline context applied to each page request.
 * @return A paged flux of batch action results.
 */
PagedFlux<AnalyzeBatchActionsResult> getAnalyzeOperationFluxPage(String operationId, Integer top, Integer skip,
    boolean showStats, Context context) {
    return new PagedFlux<>(
        () -> getPage(null, operationId, top, skip, showStats, context),
        token -> getPage(token, operationId, top, skip, showStats, context));
}
/**
 * Fetches a single page of analyze-operation results. When a continuation token is present, its
 * {@code $top}/{@code $skip} query values (parsed from the nextLink URL) override the caller's
 * paging parameters; otherwise the caller's values are used as-is.
 *
 * @param continuationToken The nextLink from the previous page, or {@code null} for the first page.
 * @param operationId The long-running operation's id.
 * @param top Maximum number of items per page, or {@code null} for the service default.
 * @param skip Number of items to skip, or {@code null} for none.
 * @param showStats Whether document/request statistics are included in the response.
 * @param context Pipeline context applied to the request.
 * @return The requested page, with service errors mapped to {@code HttpResponseException}.
 */
Mono<PagedResponse<AnalyzeBatchActionsResult>> getPage(String continuationToken, String operationId, Integer top,
    Integer skip, boolean showStats, Context context) {
    Integer topValue = top;
    Integer skipValue = skip;
    if (continuationToken != null) {
        // The token is a nextLink URL; paging is driven by its query parameters from here on.
        final Map<String, Integer> continuationTokenMap = parseNextLink(continuationToken);
        topValue = continuationTokenMap.getOrDefault("$top", null);
        skipValue = continuationTokenMap.getOrDefault("$skip", null);
    }
    // Single call site — previously this request/map/onErrorMap chain was duplicated per branch.
    return service.analyzeStatusWithResponseAsync(operationId, showStats, topValue, skipValue, context)
        .map(this::toAnalyzeTasksPagedResponse)
        .onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/**
 * Adapts a raw {@link AnalyzeJobState} service response into a single-item page of
 * {@link AnalyzeBatchActionsResult}, carrying the state's nextLink as the continuation token.
 *
 * @param response The service response wrapping the job state.
 * @return A paged response whose sole element is the converted batch actions result.
 */
private PagedResponse<AnalyzeBatchActionsResult> toAnalyzeTasksPagedResponse(Response<AnalyzeJobState> response) {
    final AnalyzeJobState jobState = response.getValue();
    return new PagedResponseBase<Void, AnalyzeBatchActionsResult>(
        response.getRequest(),
        response.getStatusCode(),
        response.getHeaders(),
        // Each poll yields exactly one aggregate result, hence the single-element page.
        Arrays.asList(toAnalyzeTasks(jobState)),
        jobState.getNextLink(),
        null);
}
/**
 * Translates a job-state response into a {@link PollResponse}, mapping the service status to a
 * {@link LongRunningOperationStatus} and copying job metadata and per-action counters onto the
 * operation detail carried by the poller.
 *
 * @param analyzeJobStateResponse The latest job-state response from the service.
 * @param operationResultPollResponse The previous poll response whose value is updated in place.
 * @return The next poll response with the freshly derived status.
 */
private Mono<PollResponse<AnalyzeBatchActionsOperationDetail>> processAnalyzedModelResponse(
    Response<AnalyzeJobState> analyzeJobStateResponse,
    PollResponse<AnalyzeBatchActionsOperationDetail> operationResultPollResponse) {
    final AnalyzeJobState jobState = analyzeJobStateResponse.getValue();
    // Default to completed when the service omits a status, matching previous behavior.
    LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
    if (jobState != null && jobState.getStatus() != null) {
        switch (jobState.getStatus()) {
            case NOT_STARTED:
            case RUNNING:
                status = LongRunningOperationStatus.IN_PROGRESS;
                break;
            case SUCCEEDED:
                status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                break;
            case CANCELLED:
                status = LongRunningOperationStatus.USER_CANCELLED;
                break;
            default:
                // Unknown service states pass through as non-terminal custom statuses.
                status = LongRunningOperationStatus.fromString(
                    jobState.getStatus().toString(), true);
                break;
        }
    }
    final AnalyzeBatchActionsOperationDetail operationDetail = operationResultPollResponse.getValue();
    // Fix: the status switch above tolerates a null body, but the metadata copy used to
    // dereference getValue() unconditionally and would have thrown NullPointerException.
    if (jobState != null) {
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setDisplayName(operationDetail,
            jobState.getDisplayName());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setCreatedAt(operationDetail,
            jobState.getCreatedDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setExpiresAt(operationDetail,
            jobState.getExpirationDateTime());
        AnalyzeBatchActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationDetail,
            jobState.getLastUpdateDateTime());
        final TasksStateTasks tasksResult = jobState.getTasks();
        // Same defensive treatment for the per-action counters.
        if (tasksResult != null) {
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsFailed(operationDetail,
                tasksResult.getFailed());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInProgress(operationDetail,
                tasksResult.getInProgress());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsSucceeded(operationDetail,
                tasksResult.getCompleted());
            AnalyzeBatchActionsOperationDetailPropertiesHelper.setActionsInTotal(operationDetail,
                tasksResult.getTotal());
        }
    }
    return Mono.just(new PollResponse<>(status, operationDetail));
}
/**
 * Returns the given context, or {@code Context.NONE} when the caller passed {@code null}.
 *
 * @param context The caller-supplied context, possibly {@code null}.
 * @return A non-null context.
 */
private Context getNotNullContext(Context context) {
    if (context == null) {
        return Context.NONE;
    }
    return context;
}
/**
 * Returns the given options, or a fresh default instance when the caller passed {@code null}.
 *
 * @param options The caller-supplied options, possibly {@code null}.
 * @return A non-null options object.
 */
private AnalyzeBatchActionsOptions getNotNullAnalyzeBatchActionsOptions(AnalyzeBatchActionsOptions options) {
    if (options == null) {
        return new AnalyzeBatchActionsOptions();
    }
    return options;
}
/**
 * Returns the given model version, or the service default {@code "latest"} when the caller
 * passed {@code null}.
 *
 * @param modelVersion The caller-supplied model version, possibly {@code null}.
 * @return A non-null model version string.
 */
private String getNotNullModelVersion(String modelVersion) {
    if (modelVersion == null) {
        return "latest";
    }
    return modelVersion;
}
/**
 * Extracts the action (task) name and index from a service error's {@code target} reference using
 * {@code REGEX_ACTION_ERROR_TARGET}.
 *
 * @param targetReference The error target string returned by the service.
 * @return A two-element array: [0] the task name, [1] the task index text; both elements remain
 * {@code null} when the reference does not match the expected pattern.
 * @throws RuntimeException When the target reference is null or empty (logged via the client logger).
 */
private String[] parseActionErrorTarget(String targetReference) {
    if (CoreUtils.isNullOrEmpty(targetReference)) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Expected an error with a target field referencing an action but did not get one"));
    }
    final String[] taskNameIdPair = new String[2];
    final Matcher matcher =
        Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE).matcher(targetReference);
    // Scan all matches so the last one wins, preserving the original loop's semantics.
    while (matcher.find()) {
        taskNameIdPair[0] = matcher.group(1);
        taskNameIdPair[1] = matcher.group(2);
    }
    return taskNameIdPair;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.