comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
Correct: The `CosmosClientException` originates in the Direct TCP stack and so `reactor.core.Exceptions.unwrap` is not needed. | public Mono<StoreResponse> invokeStoreAsync(final Uri addressUri, final RxDocumentServiceRequest request) {
logger.debug("RntbdTransportClient.invokeStoreAsync({}, {})", addressUri, request);
checkNotNull(addressUri, "expected non-null address");
checkNotNull(request, "expected non-null request");
this.throwIfClosed();
URI address = addressUri.getURI();
final RntbdRequestArgs requestArgs = new RntbdRequestArgs(request, address);
requestArgs.traceOperation(logger, null, "invokeStoreAsync");
final RntbdEndpoint endpoint = this.endpointProvider.get(address);
final RntbdRequestRecord record = endpoint.request(requestArgs);
logger.debug("RntbdTransportClient.invokeStoreAsync({}, {}): {}", address, request, record);
return Mono.fromFuture(record.whenComplete((response, throwable) -> {
record.stage(RntbdRequestRecord.Stage.COMPLETED);
if (throwable == null) {
response.setRequestTimeline(record.takeTimelineSnapshot());
} else if (throwable instanceof CosmosClientException) {
CosmosClientException error = (CosmosClientException) throwable;
BridgeInternal.setRequestTimeline(error, record.takeTimelineSnapshot());
}
})).doOnCancel(() -> {
record.cancel(true);
});
} | } else if (throwable instanceof CosmosClientException) { | public Mono<StoreResponse> invokeStoreAsync(final Uri addressUri, final RxDocumentServiceRequest request) {
logger.debug("RntbdTransportClient.invokeStoreAsync({}, {})", addressUri, request);
checkNotNull(addressUri, "expected non-null address");
checkNotNull(request, "expected non-null request");
this.throwIfClosed();
URI address = addressUri.getURI();
final RntbdRequestArgs requestArgs = new RntbdRequestArgs(request, address);
requestArgs.traceOperation(logger, null, "invokeStoreAsync");
final RntbdEndpoint endpoint = this.endpointProvider.get(address);
final RntbdRequestRecord record = endpoint.request(requestArgs);
logger.debug("RntbdTransportClient.invokeStoreAsync({}, {}): {}", address, request, record);
return Mono.fromFuture(record.whenComplete((response, throwable) -> {
record.stage(RntbdRequestRecord.Stage.COMPLETED);
if (throwable == null) {
response.setRequestTimeline(record.takeTimelineSnapshot());
} else if (throwable instanceof CosmosClientException) {
CosmosClientException error = (CosmosClientException) throwable;
BridgeInternal.setRequestTimeline(error, record.takeTimelineSnapshot());
}
})).doOnCancel(() -> {
record.cancel(true);
});
} | class RntbdTransportClient extends TransportClient {
private static final String TAG_NAME = RntbdTransportClient.class.getSimpleName();
private static final AtomicLong instanceCount = new AtomicLong();
private static final Logger logger = LoggerFactory.getLogger(RntbdTransportClient.class);
private final AtomicBoolean closed = new AtomicBoolean();
private final RntbdEndpoint.Provider endpointProvider;
private final long id;
private final Tag tag;
RntbdTransportClient(final RntbdEndpoint.Provider endpointProvider) {
this.endpointProvider = endpointProvider;
this.id = instanceCount.incrementAndGet();
this.tag = RntbdTransportClient.tag(this.id);
}
RntbdTransportClient(final Options options, final SslContext sslContext) {
this.endpointProvider = new RntbdServiceEndpoint.Provider(this, options, sslContext);
this.id = instanceCount.incrementAndGet();
this.tag = RntbdTransportClient.tag(this.id);
}
RntbdTransportClient(final Configs configs, final int requestTimeoutInSeconds, final UserAgentContainer userAgent) {
this(new Options.Builder(requestTimeoutInSeconds).userAgent(userAgent).build(), configs.getSslContext());
}
public boolean isClosed() {
return this.closed.get();
}
@Override
public void close() {
if (this.closed.compareAndSet(false, true)) {
logger.debug("close {}", this);
this.endpointProvider.close();
return;
}
logger.debug("already closed {}", this);
}
public int endpointCount() {
return this.endpointProvider.count();
}
public int endpointEvictionCount() {
return this.endpointProvider.evictions();
}
public long id() {
return this.id;
}
@Override
public Tag tag() {
return this.tag;
}
@Override
public String toString() {
return RntbdObjectMapper.toString(this);
}
private static Tag tag(long id) {
return Tag.of(TAG_NAME, Strings.padStart(Long.toHexString(id).toUpperCase(), 4, '0'));
}
private void throwIfClosed() {
checkState(!this.closed.get(), "%s is closed", this);
}
public static final class Options {
@JsonProperty()
private final int bufferPageSize;
@JsonProperty()
private final Duration connectionTimeout;
@JsonProperty()
private final Duration idleChannelTimeout;
@JsonProperty()
private final Duration idleEndpointTimeout;
@JsonProperty()
private final int maxBufferCapacity;
@JsonProperty()
private final int maxChannelsPerEndpoint;
@JsonProperty()
private final int maxRequestsPerChannel;
@JsonProperty()
private final Duration receiveHangDetectionTime;
@JsonProperty()
private final Duration requestExpiryInterval;
@JsonProperty()
private final Duration requestTimeout;
@JsonProperty()
private final Duration requestTimerResolution;
@JsonProperty()
private final Duration sendHangDetectionTime;
@JsonProperty()
private final Duration shutdownTimeout;
@JsonIgnore()
private final UserAgentContainer userAgent;
private Options() {
this.bufferPageSize = 8192;
this.connectionTimeout = null;
this.idleChannelTimeout = Duration.ZERO;
this.idleEndpointTimeout = Duration.ofSeconds(70L);
this.maxBufferCapacity = 8192 << 10;
this.maxChannelsPerEndpoint = 10;
this.maxRequestsPerChannel = 30;
this.receiveHangDetectionTime = Duration.ofSeconds(65L);
this.requestExpiryInterval = Duration.ofSeconds(5L);
this.requestTimeout = null;
this.requestTimerResolution = Duration.ofMillis(5L);
this.sendHangDetectionTime = Duration.ofSeconds(10L);
this.shutdownTimeout = Duration.ofSeconds(15L);
this.userAgent = new UserAgentContainer();
}
private Options(Builder builder) {
this.bufferPageSize = builder.bufferPageSize;
this.idleChannelTimeout = builder.idleChannelTimeout;
this.idleEndpointTimeout = builder.idleEndpointTimeout;
this.maxBufferCapacity = builder.maxBufferCapacity;
this.maxChannelsPerEndpoint = builder.maxChannelsPerEndpoint;
this.maxRequestsPerChannel = builder.maxRequestsPerChannel;
this.receiveHangDetectionTime = builder.receiveHangDetectionTime;
this.requestExpiryInterval = builder.requestExpiryInterval;
this.requestTimeout = builder.requestTimeout;
this.requestTimerResolution = builder.requestTimerResolution;
this.sendHangDetectionTime = builder.sendHangDetectionTime;
this.shutdownTimeout = builder.shutdownTimeout;
this.userAgent = builder.userAgent;
this.connectionTimeout = builder.connectionTimeout == null
? builder.requestTimeout
: builder.connectionTimeout;
}
public int bufferPageSize() {
return this.bufferPageSize;
}
public Duration connectionTimeout() {
return this.connectionTimeout;
}
public Duration idleChannelTimeout() {
return this.idleChannelTimeout;
}
public Duration idleEndpointTimeout() {
return this.idleEndpointTimeout;
}
public int maxBufferCapacity() {
return this.maxBufferCapacity;
}
public int maxChannelsPerEndpoint() {
return this.maxChannelsPerEndpoint;
}
public int maxRequestsPerChannel() {
return this.maxRequestsPerChannel;
}
public Duration receiveHangDetectionTime() {
return this.receiveHangDetectionTime;
}
public Duration requestExpiryInterval() {
return this.requestExpiryInterval;
}
public Duration requestTimeout() {
return this.requestTimeout;
}
public Duration requestTimerResolution() {
return this.requestTimerResolution;
}
public Duration sendHangDetectionTime() {
return this.sendHangDetectionTime;
}
public Duration shutdownTimeout() {
return this.shutdownTimeout;
}
public UserAgentContainer userAgent() {
return this.userAgent;
}
@Override
public String toString() {
return RntbdObjectMapper.toJson(this);
}
/**
* A builder for constructing {@link Options} instances.
*
* <h3>Using system properties to set the default {@link Options} used by an {@link Builder}</h3>
* <p>
* A default options instance is created when the {@link Builder} class is initialized. This instance specifies
* the default options used by every {@link Builder} instance. In priority order the default options instance
* is created from:
* <ol>
* <li>The JSON value of system property {@code azure.cosmos.directTcp.defaultOptions}.
* <p>Example:
* <pre>{@code -Dazure.cosmos.directTcp.defaultOptions={\"maxChannelsPerEndpoint\":5,\"maxRequestsPerChannel\":30}}</pre>
* </li>
* <li>The contents of the JSON file located by system property {@code azure.cosmos.directTcp
* .defaultOptionsFile}.
* <p>Example:
* <pre>{@code -Dazure.cosmos.directTcp.defaultOptionsFile=/path/to/default/options/file}</pre>
* </li>
* <li>The contents of JSON resource file {@code azure.cosmos.directTcp.defaultOptions.json}.
* <p>Specifically, the resource file is read from this stream:
* <pre>{@code RntbdTransportClient.class.getClassLoader().getResourceAsStream("azure.cosmos.directTcp.defaultOptions.json")}</pre>
* <p>Example: <pre>{@code {
* "bufferPageSize": 8192,
* "connectionTimeout": "PT1M",
* "idleChannelTimeout": "PT0S",
* "idleEndpointTimeout": "PT1M10S",
* "maxBufferCapacity": 8388608,
* "maxChannelsPerEndpoint": 10,
* "maxRequestsPerChannel": 30,
* "receiveHangDetectionTime": "PT1M5S",
* "requestExpiryInterval": "PT5S",
* "requestTimeout": "PT1M",
* "requestTimerResolution": "PT0.5S",
* "sendHangDetectionTime": "PT10S",
* "shutdownTimeout": "PT15S"
* }}</pre>
* </li>
* </ol>
* <p>JSON value errors are logged and then ignored. If none of the above values are available or all available
* values are in error, the default options instance is created from the private parameterless constructor for
* {@link Options}.
*/
@SuppressWarnings("UnusedReturnValue")
public static class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options();
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private UserAgentContainer userAgent;
public Builder(Duration requestTimeout) {
this.requestTimeout(requestTimeout);
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionTimeout = DEFAULT_OPTIONS.connectionTimeout;
this.idleChannelTimeout = DEFAULT_OPTIONS.idleChannelTimeout;
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = DEFAULT_OPTIONS.maxChannelsPerEndpoint;
this.maxRequestsPerChannel = DEFAULT_OPTIONS.maxRequestsPerChannel;
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.userAgent = DEFAULT_OPTIONS.userAgent;
}
public Builder(int requestTimeoutInSeconds) {
this(Duration.ofSeconds(requestTimeoutInSeconds));
}
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectionTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
public Builder sendHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.sendHangDetectionTime = value;
return this;
}
public Builder shutdownTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.shutdownTimeout = value;
return this;
}
public Builder userAgent(final UserAgentContainer value) {
checkNotNull(value, "expected non-null value");
this.userAgent = value;
return this;
}
}
}
static final class JsonSerializer extends StdSerializer<RntbdTransportClient> {
private static final long serialVersionUID = 1007663695768825670L;
JsonSerializer() {
super(RntbdTransportClient.class);
}
@Override
public void serialize(
final RntbdTransportClient value,
final JsonGenerator generator,
final SerializerProvider provider
) throws IOException {
generator.writeStartObject();
generator.writeNumberField("id", value.id());
generator.writeBooleanField("isClosed", value.isClosed());
generator.writeObjectField("configuration", value.endpointProvider.config());
generator.writeObjectFieldStart("serviceEndpoints");
generator.writeNumberField("count", value.endpointCount());
generator.writeArrayFieldStart("items");
for (final Iterator<RntbdEndpoint> iterator = value.endpointProvider.list().iterator(); iterator.hasNext(); ) {
generator.writeObject(iterator.next());
}
generator.writeEndArray();
generator.writeEndObject();
generator.writeEndObject();
}
}
} | class RntbdTransportClient extends TransportClient {
private static final String TAG_NAME = RntbdTransportClient.class.getSimpleName();
private static final AtomicLong instanceCount = new AtomicLong();
private static final Logger logger = LoggerFactory.getLogger(RntbdTransportClient.class);
private final AtomicBoolean closed = new AtomicBoolean();
private final RntbdEndpoint.Provider endpointProvider;
private final long id;
private final Tag tag;
RntbdTransportClient(final RntbdEndpoint.Provider endpointProvider) {
this.endpointProvider = endpointProvider;
this.id = instanceCount.incrementAndGet();
this.tag = RntbdTransportClient.tag(this.id);
}
RntbdTransportClient(final Options options, final SslContext sslContext) {
this.endpointProvider = new RntbdServiceEndpoint.Provider(this, options, sslContext);
this.id = instanceCount.incrementAndGet();
this.tag = RntbdTransportClient.tag(this.id);
}
RntbdTransportClient(final Configs configs, final int requestTimeoutInSeconds, final UserAgentContainer userAgent) {
this(new Options.Builder(requestTimeoutInSeconds).userAgent(userAgent).build(), configs.getSslContext());
}
public boolean isClosed() {
return this.closed.get();
}
@Override
public void close() {
if (this.closed.compareAndSet(false, true)) {
logger.debug("close {}", this);
this.endpointProvider.close();
return;
}
logger.debug("already closed {}", this);
}
public int endpointCount() {
return this.endpointProvider.count();
}
public int endpointEvictionCount() {
return this.endpointProvider.evictions();
}
public long id() {
return this.id;
}
@Override
public Tag tag() {
return this.tag;
}
@Override
public String toString() {
return RntbdObjectMapper.toString(this);
}
private static Tag tag(long id) {
return Tag.of(TAG_NAME, Strings.padStart(Long.toHexString(id).toUpperCase(), 4, '0'));
}
private void throwIfClosed() {
checkState(!this.closed.get(), "%s is closed", this);
}
public static final class Options {
@JsonProperty()
private final int bufferPageSize;
@JsonProperty()
private final Duration connectionTimeout;
@JsonProperty()
private final Duration idleChannelTimeout;
@JsonProperty()
private final Duration idleEndpointTimeout;
@JsonProperty()
private final int maxBufferCapacity;
@JsonProperty()
private final int maxChannelsPerEndpoint;
@JsonProperty()
private final int maxRequestsPerChannel;
@JsonProperty()
private final Duration receiveHangDetectionTime;
@JsonProperty()
private final Duration requestExpiryInterval;
@JsonProperty()
private final Duration requestTimeout;
@JsonProperty()
private final Duration requestTimerResolution;
@JsonProperty()
private final Duration sendHangDetectionTime;
@JsonProperty()
private final Duration shutdownTimeout;
@JsonIgnore()
private final UserAgentContainer userAgent;
private Options() {
this.bufferPageSize = 8192;
this.connectionTimeout = null;
this.idleChannelTimeout = Duration.ZERO;
this.idleEndpointTimeout = Duration.ofSeconds(70L);
this.maxBufferCapacity = 8192 << 10;
this.maxChannelsPerEndpoint = 10;
this.maxRequestsPerChannel = 30;
this.receiveHangDetectionTime = Duration.ofSeconds(65L);
this.requestExpiryInterval = Duration.ofSeconds(5L);
this.requestTimeout = null;
this.requestTimerResolution = Duration.ofMillis(5L);
this.sendHangDetectionTime = Duration.ofSeconds(10L);
this.shutdownTimeout = Duration.ofSeconds(15L);
this.userAgent = new UserAgentContainer();
}
private Options(Builder builder) {
this.bufferPageSize = builder.bufferPageSize;
this.idleChannelTimeout = builder.idleChannelTimeout;
this.idleEndpointTimeout = builder.idleEndpointTimeout;
this.maxBufferCapacity = builder.maxBufferCapacity;
this.maxChannelsPerEndpoint = builder.maxChannelsPerEndpoint;
this.maxRequestsPerChannel = builder.maxRequestsPerChannel;
this.receiveHangDetectionTime = builder.receiveHangDetectionTime;
this.requestExpiryInterval = builder.requestExpiryInterval;
this.requestTimeout = builder.requestTimeout;
this.requestTimerResolution = builder.requestTimerResolution;
this.sendHangDetectionTime = builder.sendHangDetectionTime;
this.shutdownTimeout = builder.shutdownTimeout;
this.userAgent = builder.userAgent;
this.connectionTimeout = builder.connectionTimeout == null
? builder.requestTimeout
: builder.connectionTimeout;
}
public int bufferPageSize() {
return this.bufferPageSize;
}
public Duration connectionTimeout() {
return this.connectionTimeout;
}
public Duration idleChannelTimeout() {
return this.idleChannelTimeout;
}
public Duration idleEndpointTimeout() {
return this.idleEndpointTimeout;
}
public int maxBufferCapacity() {
return this.maxBufferCapacity;
}
public int maxChannelsPerEndpoint() {
return this.maxChannelsPerEndpoint;
}
public int maxRequestsPerChannel() {
return this.maxRequestsPerChannel;
}
public Duration receiveHangDetectionTime() {
return this.receiveHangDetectionTime;
}
public Duration requestExpiryInterval() {
return this.requestExpiryInterval;
}
public Duration requestTimeout() {
return this.requestTimeout;
}
public Duration requestTimerResolution() {
return this.requestTimerResolution;
}
public Duration sendHangDetectionTime() {
return this.sendHangDetectionTime;
}
public Duration shutdownTimeout() {
return this.shutdownTimeout;
}
public UserAgentContainer userAgent() {
return this.userAgent;
}
@Override
public String toString() {
return RntbdObjectMapper.toJson(this);
}
/**
* A builder for constructing {@link Options} instances.
*
* <h3>Using system properties to set the default {@link Options} used by an {@link Builder}</h3>
* <p>
* A default options instance is created when the {@link Builder} class is initialized. This instance specifies
* the default options used by every {@link Builder} instance. In priority order the default options instance
* is created from:
* <ol>
* <li>The JSON value of system property {@code azure.cosmos.directTcp.defaultOptions}.
* <p>Example:
* <pre>{@code -Dazure.cosmos.directTcp.defaultOptions={\"maxChannelsPerEndpoint\":5,\"maxRequestsPerChannel\":30}}</pre>
* </li>
* <li>The contents of the JSON file located by system property {@code azure.cosmos.directTcp
* .defaultOptionsFile}.
* <p>Example:
* <pre>{@code -Dazure.cosmos.directTcp.defaultOptionsFile=/path/to/default/options/file}</pre>
* </li>
* <li>The contents of JSON resource file {@code azure.cosmos.directTcp.defaultOptions.json}.
* <p>Specifically, the resource file is read from this stream:
* <pre>{@code RntbdTransportClient.class.getClassLoader().getResourceAsStream("azure.cosmos.directTcp.defaultOptions.json")}</pre>
* <p>Example: <pre>{@code {
* "bufferPageSize": 8192,
* "connectionTimeout": "PT1M",
* "idleChannelTimeout": "PT0S",
* "idleEndpointTimeout": "PT1M10S",
* "maxBufferCapacity": 8388608,
* "maxChannelsPerEndpoint": 10,
* "maxRequestsPerChannel": 30,
* "receiveHangDetectionTime": "PT1M5S",
* "requestExpiryInterval": "PT5S",
* "requestTimeout": "PT1M",
* "requestTimerResolution": "PT0.5S",
* "sendHangDetectionTime": "PT10S",
* "shutdownTimeout": "PT15S"
* }}</pre>
* </li>
* </ol>
* <p>JSON value errors are logged and then ignored. If none of the above values are available or all available
* values are in error, the default options instance is created from the private parameterless constructor for
* {@link Options}.
*/
@SuppressWarnings("UnusedReturnValue")
public static class Builder {
private static final String DEFAULT_OPTIONS_PROPERTY_NAME = "azure.cosmos.directTcp.defaultOptions";
private static final Options DEFAULT_OPTIONS;
static {
Options options = null;
try {
final String string = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME);
if (string != null) {
try {
options = RntbdObjectMapper.readValue(string, Options.class);
} catch (IOException error) {
logger.error("failed to parse default Direct TCP options {} due to ", string, error);
}
}
if (options == null) {
final String path = System.getProperty(DEFAULT_OPTIONS_PROPERTY_NAME + "File");
if (path != null) {
try {
options = RntbdObjectMapper.readValue(new File(path), Options.class);
} catch (IOException error) {
logger.error("failed to load default Direct TCP options from {} due to ", path, error);
}
}
}
if (options == null) {
final ClassLoader loader = RntbdTransportClient.class.getClassLoader();
final String name = DEFAULT_OPTIONS_PROPERTY_NAME + ".json";
try (InputStream stream = loader.getResourceAsStream(name)) {
if (stream != null) {
options = RntbdObjectMapper.readValue(stream, Options.class);
}
} catch (IOException error) {
logger.error("failed to load Direct TCP options from resource {} due to ", name, error);
}
}
} finally {
if (options == null) {
DEFAULT_OPTIONS = new Options();
} else {
logger.info("Updated default Direct TCP options from system property {}: {}",
DEFAULT_OPTIONS_PROPERTY_NAME,
options);
DEFAULT_OPTIONS = options;
}
}
}
private int bufferPageSize;
private Duration connectionTimeout;
private Duration idleChannelTimeout;
private Duration idleEndpointTimeout;
private int maxBufferCapacity;
private int maxChannelsPerEndpoint;
private int maxRequestsPerChannel;
private Duration receiveHangDetectionTime;
private Duration requestExpiryInterval;
private Duration requestTimeout;
private Duration requestTimerResolution;
private Duration sendHangDetectionTime;
private Duration shutdownTimeout;
private UserAgentContainer userAgent;
public Builder(Duration requestTimeout) {
this.requestTimeout(requestTimeout);
this.bufferPageSize = DEFAULT_OPTIONS.bufferPageSize;
this.connectionTimeout = DEFAULT_OPTIONS.connectionTimeout;
this.idleChannelTimeout = DEFAULT_OPTIONS.idleChannelTimeout;
this.idleEndpointTimeout = DEFAULT_OPTIONS.idleEndpointTimeout;
this.maxBufferCapacity = DEFAULT_OPTIONS.maxBufferCapacity;
this.maxChannelsPerEndpoint = DEFAULT_OPTIONS.maxChannelsPerEndpoint;
this.maxRequestsPerChannel = DEFAULT_OPTIONS.maxRequestsPerChannel;
this.receiveHangDetectionTime = DEFAULT_OPTIONS.receiveHangDetectionTime;
this.requestExpiryInterval = DEFAULT_OPTIONS.requestExpiryInterval;
this.requestTimerResolution = DEFAULT_OPTIONS.requestTimerResolution;
this.sendHangDetectionTime = DEFAULT_OPTIONS.sendHangDetectionTime;
this.shutdownTimeout = DEFAULT_OPTIONS.shutdownTimeout;
this.userAgent = DEFAULT_OPTIONS.userAgent;
}
public Builder(int requestTimeoutInSeconds) {
this(Duration.ofSeconds(requestTimeoutInSeconds));
}
public Builder bufferPageSize(final int value) {
checkArgument(value >= 4096 && (value & (value - 1)) == 0,
"expected value to be a power of 2 >= 4096, not %s",
value);
this.bufferPageSize = value;
return this;
}
public Options build() {
checkState(this.bufferPageSize <= this.maxBufferCapacity,
"expected bufferPageSize (%s) <= maxBufferCapacity (%s)",
this.bufferPageSize,
this.maxBufferCapacity);
return new Options(this);
}
public Builder connectionTimeout(final Duration value) {
checkArgument(value == null || value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.connectionTimeout = value;
return this;
}
public Builder idleChannelTimeout(final Duration value) {
checkNotNull(value, "expected non-null value");
this.idleChannelTimeout = value;
return this;
}
public Builder idleEndpointTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.idleEndpointTimeout = value;
return this;
}
public Builder maxBufferCapacity(final int value) {
checkArgument(value > 0 && (value & (value - 1)) == 0,
"expected positive value, not %s",
value);
this.maxBufferCapacity = value;
return this;
}
public Builder maxChannelsPerEndpoint(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxChannelsPerEndpoint = value;
return this;
}
public Builder maxRequestsPerChannel(final int value) {
checkArgument(value > 0, "expected positive value, not %s", value);
this.maxRequestsPerChannel = value;
return this;
}
public Builder receiveHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.receiveHangDetectionTime = value;
return this;
}
public Builder requestExpiryInterval(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestExpiryInterval = value;
return this;
}
public Builder requestTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimeout = value;
return this;
}
public Builder requestTimerResolution(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.requestTimerResolution = value;
return this;
}
public Builder sendHangDetectionTime(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.sendHangDetectionTime = value;
return this;
}
public Builder shutdownTimeout(final Duration value) {
checkArgument(value != null && value.compareTo(Duration.ZERO) > 0,
"expected positive value, not %s",
value);
this.shutdownTimeout = value;
return this;
}
public Builder userAgent(final UserAgentContainer value) {
checkNotNull(value, "expected non-null value");
this.userAgent = value;
return this;
}
}
}
static final class JsonSerializer extends StdSerializer<RntbdTransportClient> {
private static final long serialVersionUID = 1007663695768825670L;
JsonSerializer() {
super(RntbdTransportClient.class);
}
@Override
public void serialize(
final RntbdTransportClient value,
final JsonGenerator generator,
final SerializerProvider provider
) throws IOException {
generator.writeStartObject();
generator.writeNumberField("id", value.id());
generator.writeBooleanField("isClosed", value.isClosed());
generator.writeObjectField("configuration", value.endpointProvider.config());
generator.writeObjectFieldStart("serviceEndpoints");
generator.writeNumberField("count", value.endpointCount());
generator.writeArrayFieldStart("items");
for (final Iterator<RntbdEndpoint> iterator = value.endpointProvider.list().iterator(); iterator.hasNext(); ) {
generator.writeObject(iterator.next());
}
generator.writeEndArray();
generator.writeEndObject();
generator.writeEndObject();
}
}
} |
nit: can remove final here and other examples. | public void detectLanguageForListInputTextsWithResponse() {
final List<String> textInputs = Arrays.asList(
"This is written in English",
"Este es un document escrito en Español.");
final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
textAnalyticsClient.detectLanguagesWithResponse(textInputs, "US", Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
System.out.printf(
"A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
batchStatistics.getDocumentCount(),
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
}
}
} | final DocumentResultCollection<DetectLanguageResult> detectLanguageResults = | public void detectLanguageForListInputTextsWithResponse() {
final List<String> textInputs = Arrays.asList(
"This is written in English",
"Este es un document escrito en Español.");
final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
textAnalyticsClient.detectLanguagesWithResponse(textInputs, "US", Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
}
}
} | class TextAnalyticsClientJavaDocCodeSnippets {
// Placeholder credentials for the snippets; replace with real values when running the samples.
private static final String SUBSCRIPTION_KEY = null;
private static final String ENDPOINT = null;
// Shared client used by the snippet methods below.
// NOTE(review): built without endpoint/key here — presumably only needs to compile for
// snippet extraction; confirm before actually executing these samples.
private final TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
 * Code snippet for creating a {@link TextAnalyticsClient} with a custom {@link HttpPipeline}.
 */
public void createAsyncTextAnalyticsClientWithPipeline() {
    HttpPipeline pipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    // Local variable intentionally shadows the field: the snippet shows a complete build.
    TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
        .pipeline(pipeline)
        .endpoint(ENDPOINT)
        .subscriptionKey(SUBSCRIPTION_KEY)
        .buildClient();
}
/**
 * Code snippet for creating a {@link TextAnalyticsClient}.
 */
public void createTextAnalyticsClient() {
    TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
        .subscriptionKey(SUBSCRIPTION_KEY)
        .endpoint(ENDPOINT)
        .buildClient();
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}.
 */
public void detectLanguageSingleText() {
    final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
    for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguageWithResponse(String, String, Context)}.
 */
public void detectLanguageForSingleInputTextAndCountryHintWithResponse() {
    final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguageWithResponse(
        "This text is in English", "US", Context.NONE).getValue();
    for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguages(List)}.
 */
public void detectLanguageForListInputTexts() {
    final List<String> textInputs = Arrays.asList(
        "This is written in English",
        "Este es un document escrito en Español.");
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectLanguages(textInputs);
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(),
                detectedLanguage.getIso6391Name(),
                detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectBatchLanguages(List)}.
 */
public void detectLanguageForListDetectedLanguageInput() {
    final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectBatchLanguages(detectLanguageInputs);
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#detectBatchLanguagesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 */
public void detectLanguageForListDetectedLanguageInputWithResponse() {
    final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectBatchLanguagesWithResponse(detectLanguageInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(),
                detectedLanguage.getIso6391Name(),
                detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}.
 */
public void recognizeEntitiesSingleText() {
    final RecognizeEntitiesResult recognizeEntitiesResult =
        textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
    for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
        System.out.printf(
            "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(String, String, Context)}.
 */
public void recognizeEntitiesSingleTextWithResponse() {
    final RecognizeEntitiesResult recognizeEntitiesResult = textAnalyticsClient.recognizeEntitiesWithResponse(
        "Satya Nadella is the CEO of Microsoft", "en", Context.NONE).getValue();
    for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
        System.out.printf(
            "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(List)}.
 */
public void recognizeEntitiesListText() {
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(List, String, Context)}.
 */
public void recognizeEntitiesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeBatchEntities(List)}.
 */
public void recognizeBatchEntitiesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#recognizeBatchEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 */
public void recognizeBatchEntitiesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String)}.
 */
public void recognizePiiEntitiesSingleText() {
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntities("My SSN is 555-55-5555");
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        System.out.printf(
            "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(String, String, Context)}.
 */
public void recognizePiiEntitiesSingleTextWithResponse() {
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntitiesWithResponse("My SSN is 555-55-5555", "en", Context.NONE)
            .getValue();
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        System.out.printf(
            "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(List)}.
 */
public void recognizePiiEntitiesListText() {
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(List, String, Context)}.
 */
public void recognizePiiEntitiesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeBatchPiiEntities(List)}.
 */
public void recognizeBatchPiiEntitiesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s,"
                    + " score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#recognizeBatchPiiEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 */
public void recognizeBatchPiiEntitiesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String)}.
 */
public void recognizeLinkedEntitiesSingleText() {
    for (LinkedEntity linkedEntity : textAnalyticsClient.recognizeLinkedEntities(
        "Old Faithful is a geyser at Yellowstone Park.").getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(String, String, Context)}.
 */
public void recognizeLinkedEntitiesSingleTextWithResponse() {
    final RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult =
        textAnalyticsClient.recognizeLinkedEntitiesWithResponse(
            "Old Faithful is a geyser at Yellowstone Park.", "en", Context.NONE).getValue();
    for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(List)}.
 */
public void recognizeLinkedEntitiesListText() {
    final List<String> textInputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds.");
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
        textAnalyticsClient.recognizeLinkedEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(List, String, Context)}.
 */
public void recognizeLinkedEntitiesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds.");
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
        textAnalyticsClient.recognizeLinkedEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeBatchLinkedEntities(List)}.
 */
public void recognizeBatchLinkedEntitiesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
        new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
    );
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
        textAnalyticsClient.recognizeBatchLinkedEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#recognizeBatchLinkedEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 */
public void recognizeBatchLinkedEntitiesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
        new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
    );
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
        textAnalyticsClient.recognizeBatchLinkedEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String)}.
 */
public void extractKeyPhrasesSingleText() {
    final List<String> keyPhrases =
        textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.").getKeyPhrases();
    for (String keyPhrase : keyPhrases) {
        System.out.printf("Recognized phrases: %s.%n", keyPhrase);
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(String, String, Context)}.
 */
public void extractKeyPhrasesSingleTextWithResponse() {
    final ExtractKeyPhraseResult extractKeyPhrases = textAnalyticsClient.extractKeyPhrasesWithResponse(
        "My cat might need to see a veterinarian.", "en", Context.NONE).getValue();
    for (String keyPhrases : extractKeyPhrases.getKeyPhrases()) {
        System.out.printf("Recognized phrases: %s.%n", keyPhrases);
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(List)}.
 */
public void extractKeyPhrasesListText() {
    final List<String> textInputs = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractKeyPhrases(textInputs);
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(List, String, Context)}.
 */
public void extractKeyPhrasesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractKeyPhrasesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractBatchKeyPhrases(List)}.
 */
public void extractBatchKeyPhrasesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractBatchKeyPhrases(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#extractBatchKeyPhrasesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 */
public void extractBatchKeyPhrasesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractBatchKeyPhrasesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String)}.
 */
public void analyzeSentimentSingleText() {
    final AnalyzeSentimentResult sentimentResult =
        textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
    final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
    System.out.printf(
        "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
        documentSentiment.getTextSentimentClass(),
        documentSentiment.getPositiveScore(),
        documentSentiment.getNeutralScore(),
        documentSentiment.getNegativeScore());
    for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            textSentiment.getTextSentimentClass(),
            textSentiment.getPositiveScore(),
            textSentiment.getNeutralScore(),
            textSentiment.getNegativeScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentWithResponse(String, String, Context)}.
 */
public void analyzeSentimentSingleTextWithResponse() {
    final AnalyzeSentimentResult sentimentResult = textAnalyticsClient.analyzeSentimentWithResponse(
        "The hotel was dark and unclean.", "en", Context.NONE).getValue();
    final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
    System.out.printf(
        "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
        documentSentiment.getTextSentimentClass(),
        documentSentiment.getPositiveScore(),
        documentSentiment.getNeutralScore(),
        documentSentiment.getNegativeScore());
    for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            textSentiment.getTextSentimentClass(),
            textSentiment.getPositiveScore(),
            textSentiment.getNeutralScore(),
            textSentiment.getNegativeScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(List)}.
 */
public void analyzeSentimentListText() {
    final List<String> textInputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeSentiment(textInputs);
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentWithResponse(List, String, Context)}.
 */
public void analyzeSentimentListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeSentimentWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
    /**
     * Code snippet for {@link TextAnalyticsClient#analyzeBatchSentiment(List)}:
     * analyzes the sentiment of a batch of {@link TextDocumentInput} documents.
     */
    public void analyzeBatchSentimentListText() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
            new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
        );
        final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
            textAnalyticsClient.analyzeBatchSentiment(textDocumentInputs);
        final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
        System.out.printf(
            "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getDocumentCount(),
            batchStatistics.getTransactionCount(),
            batchStatistics.getValidDocumentCount());
        for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
            System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
            final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
            System.out.printf(
                "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                documentSentiment.getTextSentimentClass(),
                documentSentiment.getPositiveScore(),
                documentSentiment.getNeutralScore(),
                documentSentiment.getNegativeScore());
            for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                    sentenceSentiment.getTextSentimentClass(),
                    sentenceSentiment.getPositiveScore(),
                    sentenceSentiment.getNeutralScore(),
                    sentenceSentiment.getNegativeScore());
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#analyzeBatchSentimentWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * analyzes the sentiment of a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void analyzeBatchSentimentListTextWithResponse() {
        List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
            new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
        );
        final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
            textAnalyticsClient.analyzeBatchSentimentWithResponse(textDocumentInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
        System.out.printf(
            "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getDocumentCount(),
            batchStatistics.getTransactionCount(),
            batchStatistics.getValidDocumentCount());
        for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
            System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
            final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
            System.out.printf(
                "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                documentSentiment.getTextSentimentClass(),
                documentSentiment.getPositiveScore(),
                documentSentiment.getNeutralScore(),
                documentSentiment.getNegativeScore());
            for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                    sentenceSentiment.getTextSentimentClass(),
                    sentenceSentiment.getPositiveScore(),
                    sentenceSentiment.getNeutralScore(),
                    sentenceSentiment.getNegativeScore());
            }
        }
    }
} | class TextAnalyticsClientJavaDocCodeSnippets {
    // Placeholder credentials for the snippets; replace with real values to run them.
    private static final String SUBSCRIPTION_KEY = null;
    private static final String ENDPOINT = null;
    // Client instance used by the snippet methods below (built without credentials here).
    private final TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
    /**
     * Code snippet for creating a {@link TextAnalyticsClient} with pipeline
     */
    public void createTextAnalyticsClientWithPipeline() {
        // Build a custom HTTP pipeline; add policies (retry, logging, ...) as needed.
        HttpPipeline pipeline = new HttpPipelineBuilder()
            .policies(/* add policies */)
            .build();
        // The client routes all requests through the supplied pipeline.
        TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
            .pipeline(pipeline)
            .endpoint(ENDPOINT)
            .subscriptionKey(SUBSCRIPTION_KEY)
            .buildClient();
    }
    /**
     * Code snippet for creating a {@link TextAnalyticsClient}
     */
    public void createTextAnalyticsClient() {
        // Minimal synchronous client construction: subscription key + endpoint.
        TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
            .subscriptionKey(SUBSCRIPTION_KEY)
            .endpoint(ENDPOINT)
            .buildClient();
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}:
     * detects the language of a single piece of text.
     */
    public void detectLanguageSingleText() {
        final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#detectLanguageWithResponse(String, String, Context)}:
     * detects the language of a single piece of text with a country hint and
     * access to the full HTTP response.
     */
    public void detectLanguageForSingleInputTextAndCountryHintWithResponse() {
        final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguageWithResponse(
            "This text is in English", "US", Context.NONE).getValue();
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#detectLanguages(List)}:
     * detects the language of each text in a batch and prints batch statistics.
     */
    public void detectLanguageForListInputTexts() {
        final List<String> textInputs = Arrays.asList(
            "This is written in English",
            "Este es un document escrito en Español.");
        final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
            textAnalyticsClient.detectLanguages(textInputs);
        final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
            System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
            for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
                System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#detectBatchLanguages(List)}:
     * detects the language of a batch of {@link DetectLanguageInput} documents
     * that carry per-document country hints.
     */
    public void detectLanguageForListDetectedLanguageInput() {
        final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
            new DetectLanguageInput("1", "This is written in English.", "US"),
            new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
        );
        final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
            textAnalyticsClient.detectBatchLanguages(detectLanguageInputs);
        final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
            System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
            for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
                System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#detectBatchLanguagesWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * detects the language of a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void detectLanguageForListDetectedLanguageInputWithResponse() {
        final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
            new DetectLanguageInput("1", "This is written in English.", "US"),
            new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
        );
        final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
            textAnalyticsClient.detectBatchLanguagesWithResponse(detectLanguageInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
            System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
            for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
                System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}:
     * recognizes named entities in a single piece of text.
     */
    public void recognizeEntitiesSingleText() {
        final RecognizeEntitiesResult recognizeEntitiesResult =
            textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(String, String, Context)}:
     * recognizes named entities in a single piece of text with a language hint
     * and access to the full HTTP response.
     */
    public void recognizeEntitiesSingleTextWithResponse() {
        final RecognizeEntitiesResult recognizeEntitiesResult = textAnalyticsClient.recognizeEntitiesWithResponse(
            "Satya Nadella is the CEO of Microsoft", "en", Context.NONE).getValue();
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeEntities(List)}:
     * recognizes named entities in a batch of text inputs.
     */
    public void recognizeEntitiesListText() {
        final List<String> textInputs = Arrays.asList(
            "I had a wonderful trip to Seattle last week.",
            "I work at Microsoft.");
        final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
            textAnalyticsClient.recognizeEntities(textInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
            for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(List, String, Context)}:
     * recognizes named entities in a batch of text inputs with a language hint
     * and access to the full HTTP response.
     */
    public void recognizeEntitiesListTextWithResponse() {
        final List<String> textInputs = Arrays.asList(
            "I had a wonderful trip to Seattle last week.",
            "I work at Microsoft.");
        final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
            textAnalyticsClient.recognizeEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
            for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeBatchEntities(List)}:
     * recognizes named entities in a batch of {@link TextDocumentInput} documents.
     */
    public void recognizeBatchEntitiesListText() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
            new TextDocumentInput("1", "I work at Microsoft."));
        final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
            textAnalyticsClient.recognizeBatchEntities(textDocumentInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
            for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#recognizeBatchEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * recognizes named entities in a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void recognizeBatchEntitiesListTextWithResponse() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
            new TextDocumentInput("1", "I work at Microsoft."));
        final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
            textAnalyticsClient.recognizeBatchEntitiesWithResponse(textDocumentInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
            for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String)}:
     * recognizes personally identifiable information (PII) entities in a single
     * piece of text.
     */
    public void recognizePiiEntitiesSingleText() {
        final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
            textAnalyticsClient.recognizePiiEntities("My SSN is 555-55-5555");
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(String, String, Context)}:
     * recognizes PII entities in a single piece of text with a language hint and
     * access to the full HTTP response.
     */
    public void recognizePiiEntitiesSingleTextWithResponse() {
        final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
            textAnalyticsClient.recognizePiiEntitiesWithResponse("My SSN is 555-55-5555", "en", Context.NONE)
                .getValue();
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(List)}:
     * recognizes PII entities in a batch of text inputs.
     */
    public void recognizePiiEntitiesListText() {
        final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
        final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
            textAnalyticsClient.recognizePiiEntities(textInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
            for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(List, String, Context)}:
     * recognizes PII entities in a batch of text inputs with a language hint and
     * access to the full HTTP response.
     */
    public void recognizePiiEntitiesListTextWithResponse() {
        final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
        final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
            textAnalyticsClient.recognizePiiEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
            for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeBatchPiiEntities(List)}:
     * recognizes PII entities in a batch of {@link TextDocumentInput} documents.
     */
    public void recognizeBatchPiiEntitiesListText() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("0", "My SSN is 555-55-5555"),
            new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
        final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
            textAnalyticsClient.recognizeBatchPiiEntities(textDocumentInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
            for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#recognizeBatchPiiEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * recognizes PII entities in a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void recognizeBatchPiiEntitiesListTextWithResponse() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("0", "My SSN is 555-55-5555"),
            new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
        final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
            textAnalyticsClient.recognizeBatchPiiEntitiesWithResponse(textDocumentInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
            for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
                System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                    entity.getText(), entity.getType(), entity.getScore());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String)}:
     * recognizes linked entities in a single piece of text.
     */
    public void recognizeLinkedEntitiesSingleText() {
        for (LinkedEntity linkedEntity : textAnalyticsClient.recognizeLinkedEntities(
            "Old Faithful is a geyser at Yellowstone Park.").getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(String, String, Context)}:
     * recognizes linked entities in a single piece of text with a language hint
     * and access to the full HTTP response.
     */
    public void recognizeLinkedEntitiesSingleTextWithResponse() {
        final RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult =
            textAnalyticsClient.recognizeLinkedEntitiesWithResponse(
                "Old Faithful is a geyser at Yellowstone Park.", "en", Context.NONE).getValue();
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(List)}:
     * recognizes linked entities in a batch of text inputs.
     */
    public void recognizeLinkedEntitiesListText() {
        final List<String> textInputs = Arrays.asList(
            "Old Faithful is a geyser at Yellowstone Park.",
            "Mount Shasta has lenticular clouds.");
        final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
            textAnalyticsClient.recognizeLinkedEntities(textInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
            for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
                System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(List, String, Context)}:
     * recognizes linked entities in a batch of text inputs with a language hint
     * and access to the full HTTP response.
     */
    public void recognizeLinkedEntitiesListTextWithResponse() {
        final List<String> textInputs = Arrays.asList(
            "Old Faithful is a geyser at Yellowstone Park.",
            "Mount Shasta has lenticular clouds.");
        final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
            textAnalyticsClient.recognizeLinkedEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
            for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
                System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#recognizeBatchLinkedEntities(List)}:
     * recognizes linked entities in a batch of {@link TextDocumentInput} documents.
     */
    public void recognizeBatchLinkedEntitiesListText() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
            new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
        );
        final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
            textAnalyticsClient.recognizeBatchLinkedEntities(textDocumentInputs);
        final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
            for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
                System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#recognizeBatchLinkedEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * recognizes linked entities in a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void recognizeBatchLinkedEntitiesListTextWithResponse() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
            new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
        );
        final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
            textAnalyticsClient.recognizeBatchLinkedEntitiesWithResponse(textDocumentInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
            for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
                System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String)}:
     * extracts key phrases from a single piece of text.
     */
    public void extractKeyPhrasesSingleText() {
        final List<String> keyPhrases =
            textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.").getKeyPhrases();
        for (String keyPhrase : keyPhrases) {
            System.out.printf("Recognized phrases: %s.%n", keyPhrase);
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(String, String, Context)}:
     * extracts key phrases from a single piece of text with a language hint and
     * access to the full HTTP response.
     */
    public void extractKeyPhrasesSingleTextWithResponse() {
        final ExtractKeyPhraseResult extractKeyPhrases = textAnalyticsClient.extractKeyPhrasesWithResponse(
            "My cat might need to see a veterinarian.", "en", Context.NONE).getValue();
        for (String keyPhrases : extractKeyPhrases.getKeyPhrases()) {
            System.out.printf("Recognized phrases: %s.%n", keyPhrases);
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(List)}:
     * extracts key phrases from a batch of text inputs.
     */
    public void extractKeyPhrasesListText() {
        final List<String> textInputs = Arrays.asList(
            "My cat might need to see a veterinarian.",
            "The pitot tube is used to measure airspeed."
        );
        final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
            textAnalyticsClient.extractKeyPhrases(textInputs);
        final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
            System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
            for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
                System.out.printf("Extracted phrases: %s.%n", keyPhrases);
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(List, String, Context)}:
     * extracts key phrases from a batch of text inputs with a language hint and
     * access to the full HTTP response.
     */
    public void extractKeyPhrasesListTextWithResponse() {
        final List<String> textInputs = Arrays.asList(
            "My cat might need to see a veterinarian.",
            "The pitot tube is used to measure airspeed."
        );
        final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
            textAnalyticsClient.extractKeyPhrasesWithResponse(textInputs, "en", Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
            System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
            for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
                System.out.printf("Extracted phrases: %s.%n", keyPhrases);
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#extractBatchKeyPhrases(List)}:
     * extracts key phrases from a batch of {@link TextDocumentInput} documents.
     */
    public void extractBatchKeyPhrasesListText() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
            new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
        );
        final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
            textAnalyticsClient.extractBatchKeyPhrases(textDocumentInputs);
        final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
            System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
            for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
                System.out.printf("Extracted phrases: %s.%n", keyPhrases);
            }
        }
    }
    /**
     * Code snippet for
     * {@link TextAnalyticsClient#extractBatchKeyPhrasesWithResponse(List, TextAnalyticsRequestOptions, Context)}:
     * extracts key phrases from a batch of documents with request options
     * (statistics enabled) and access to the full HTTP response.
     */
    public void extractBatchKeyPhrasesListTextWithResponse() {
        final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
            new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
            new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
        );
        final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
            textAnalyticsClient.extractBatchKeyPhrasesWithResponse(textDocumentInputs,
                new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
        final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
        System.out.printf(
            "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
            batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
        for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
            System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
            for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
                System.out.printf("Extracted phrases: %s.%n", keyPhrases);
            }
        }
    }
    /**
     * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String)}:
     * analyzes the sentiment of a single piece of text, printing the document-
     * and sentence-level sentiment scores.
     */
    public void analyzeSentimentSingleText() {
        final AnalyzeSentimentResult sentimentResult =
            textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
        final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                textSentiment.getTextSentimentClass(),
                textSentiment.getPositiveScore(),
                textSentiment.getNeutralScore(),
                textSentiment.getNegativeScore());
        }
    }
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentSingleTextWithResponse() {
    // Analyze one document (language hint "en"), unwrapping the raw response.
    final AnalyzeSentimentResult result = textAnalyticsClient.analyzeSentimentWithResponse(
        "The hotel was dark and unclean.", "en", Context.NONE).getValue();
    final TextSentiment docSentiment = result.getDocumentSentiment();
    System.out.printf(
        "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
        docSentiment.getTextSentimentClass(), docSentiment.getPositiveScore(),
        docSentiment.getNeutralScore(), docSentiment.getNegativeScore());
    // Sentence-level sentiment follows the document-level summary.
    for (TextSentiment sentence : result.getSentenceSentiments()) {
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            sentence.getTextSentimentClass(), sentence.getPositiveScore(),
            sentence.getNeutralScore(), sentence.getNegativeScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentiment(List)}
*/
public void analyzeSentimentListText() {
    // Analyze several documents in a single round trip.
    final List<String> inputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    final DocumentResultCollection<AnalyzeSentimentResult> results =
        textAnalyticsClient.analyzeSentiment(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    // For each document: id, overall sentiment, then per-sentence sentiment.
    for (AnalyzeSentimentResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        final TextSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            docSentiment.getTextSentimentClass(), docSentiment.getPositiveScore(),
            docSentiment.getNeutralScore(), docSentiment.getNegativeScore());
        for (TextSentiment sentence : result.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentence.getTextSentimentClass(), sentence.getPositiveScore(),
                sentence.getNeutralScore(), sentence.getNegativeScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentimentWithResponse(List, String, Context)}
*/
public void analyzeSentimentListTextWithResponse() {
    // Analyze several documents (language hint "en"), unwrapping the raw response.
    final List<String> inputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    final DocumentResultCollection<AnalyzeSentimentResult> results =
        textAnalyticsClient.analyzeSentimentWithResponse(inputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    // For each document: id, overall sentiment, then per-sentence sentiment.
    for (AnalyzeSentimentResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        final TextSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            docSentiment.getTextSentimentClass(), docSentiment.getPositiveScore(),
            docSentiment.getNeutralScore(), docSentiment.getNegativeScore());
        for (TextSentiment sentence : result.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentence.getTextSentimentClass(), sentence.getPositiveScore(),
                sentence.getNeutralScore(), sentence.getNegativeScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeBatchSentiment(List)}
*/
public void analyzeBatchSentimentListText() {
    // Batch inputs carry their own ids and languages.
    final List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
    );
    final DocumentResultCollection<AnalyzeSentimentResult> results =
        textAnalyticsClient.analyzeBatchSentiment(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    // For each document: id, overall sentiment, then per-sentence sentiment.
    for (AnalyzeSentimentResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        final TextSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            docSentiment.getTextSentimentClass(), docSentiment.getPositiveScore(),
            docSentiment.getNeutralScore(), docSentiment.getNegativeScore());
        for (TextSentiment sentence : result.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentence.getTextSentimentClass(), sentence.getPositiveScore(),
                sentence.getNeutralScore(), sentence.getNegativeScore());
        }
    }
}
/**
* Code snippet for
* {@link TextAnalyticsClient#analyzeBatchSentimentWithResponse(List, TextAnalyticsRequestOptions, Context)}
*/
public void analyzeBatchSentimentListTextWithResponse() {
    // Batch inputs carry their own ids and languages.
    final List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
    );
    // Request batch statistics and unwrap the raw response.
    final DocumentResultCollection<AnalyzeSentimentResult> results =
        textAnalyticsClient.analyzeBatchSentimentWithResponse(inputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    // For each document: id, overall sentiment, then per-sentence sentiment.
    for (AnalyzeSentimentResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        final TextSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            docSentiment.getTextSentimentClass(), docSentiment.getPositiveScore(),
            docSentiment.getNeutralScore(), docSentiment.getNegativeScore());
        for (TextSentiment sentence : result.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentence.getTextSentimentClass(), sentence.getPositiveScore(),
                sentence.getNeutralScore(), sentence.getNegativeScore());
        }
    }
}
} |
Can make the snippet shorter? here and other examples. | public void recognizeEntitiesSingleText() {
final RecognizeEntitiesResult recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
System.out.printf(
"Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
entity.getText(),
entity.getType(),
entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(),
entity.getOffset(),
entity.getLength(),
entity.getScore());
}
} | entity.getSubtype() == null || entity.getSubtype().isEmpty() ? "N/A" : entity.getSubtype(), | public void recognizeEntitiesSingleText() {
final RecognizeEntitiesResult recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
entity.getText(), entity.getType(), entity.getScore());
}
} | class TextAnalyticsClientJavaDocCodeSnippets {
// Placeholder credentials; supply real values to actually run these snippets.
private static final String SUBSCRIPTION_KEY = null;
private static final String ENDPOINT = null;
// Client instance shared by all code snippets in this class.
private final TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createAsyncTextAnalyticsClientWithPipeline() {
    // Assemble a custom HTTP pipeline for the client to send requests through.
    final HttpPipeline httpPipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    // NOTE(review): the method name says "Async" but buildClient() returns the synchronous client — confirm intent.
    final TextAnalyticsClient client = new TextAnalyticsClientBuilder()
        .pipeline(httpPipeline)
        .endpoint(ENDPOINT)
        .subscriptionKey(SUBSCRIPTION_KEY)
        .buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
    // Create a synchronous Text Analytics client from a subscription key and endpoint.
    final TextAnalyticsClient client = new TextAnalyticsClientBuilder()
        .subscriptionKey(SUBSCRIPTION_KEY)
        .endpoint(ENDPOINT)
        .buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}
*/
public void detectLanguageSingleText() {
    // Detect the language of a single document and print every candidate found.
    final DetectLanguageResult result = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
    for (DetectedLanguage language : result.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            language.getName(), language.getIso6391Name(), language.getScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#detectLanguageWithResponse(String, String, Context)}
*/
public void detectLanguageForSingleInputTextAndCountryHintWithResponse() {
    // Detect the language of one document with a country hint ("US"), unwrapping the raw response.
    final DetectLanguageResult result = textAnalyticsClient.detectLanguageWithResponse(
        "This text is in English", "US", Context.NONE).getValue();
    for (DetectedLanguage language : result.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            language.getName(), language.getIso6391Name(), language.getScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#detectLanguages(List)}
*/
public void detectLanguageForListInputTexts() {
    // Detect the language of several documents in one call.
    final List<String> inputs = Arrays.asList(
        "This is written in English",
        "Este es un document escrito en Español.");
    final DocumentResultCollection<DetectLanguageResult> results =
        textAnalyticsClient.detectLanguages(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    // Print each document's id and candidate languages.
    for (DetectLanguageResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        for (DetectedLanguage language : result.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                language.getName(), language.getIso6391Name(), language.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#detectLanguagesWithResponse(List, String, Context)}
*/
public void detectLanguageForListInputTextsWithResponse() {
    // Detect languages with a shared country hint ("US"), unwrapping the raw response.
    final List<String> inputs = Arrays.asList(
        "This is written in English",
        "Este es un document escrito en Español.");
    final DocumentResultCollection<DetectLanguageResult> results =
        textAnalyticsClient.detectLanguagesWithResponse(inputs, "US", Context.NONE).getValue();
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    // Print each document's id and candidate languages.
    for (DetectLanguageResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        for (DetectedLanguage language : result.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                language.getName(), language.getIso6391Name(), language.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#detectBatchLanguages(List)}
*/
public void detectLanguageForListDetectedLanguageInput() {
    // Each input carries its own id and country hint.
    final List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    final DocumentResultCollection<DetectLanguageResult> results =
        textAnalyticsClient.detectBatchLanguages(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    // Print each document's id and candidate languages.
    for (DetectLanguageResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        for (DetectedLanguage language : result.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                language.getName(), language.getIso6391Name(), language.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#detectBatchLanguagesWithResponse(List, TextAnalyticsRequestOptions, Context)}
*/
public void detectLanguageForListDetectedLanguageInputWithResponse() {
    // Each input carries its own id and country hint.
    final List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    // Request batch statistics and unwrap the raw response.
    final DocumentResultCollection<DetectLanguageResult> results =
        textAnalyticsClient.detectBatchLanguagesWithResponse(inputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    // Print each document's id and candidate languages.
    for (DetectLanguageResult result : results) {
        System.out.printf("Document ID: %s%n", result.getId());
        for (DetectedLanguage language : result.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                language.getName(), language.getIso6391Name(), language.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}
*/
/**
* Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(String, String, Context)}
*/
public void recognizeEntitiesSingleTextWithResponse() {
    // Recognize named entities in one document (language hint "en"), unwrapping the raw response.
    final RecognizeEntitiesResult recognizeEntitiesResult = textAnalyticsClient.recognizeEntitiesWithResponse(
        "Satya Nadella is the CEO of Microsoft", "en", Context.NONE).getValue();
    for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
        // Evaluate the optional subtype once instead of calling the getter three times.
        final String subtype = entity.getSubtype();
        System.out.printf(
            "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            subtype == null || subtype.isEmpty() ? "N/A" : subtype,
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeEntities(List)}
*/
public void recognizeEntitiesListText() {
    // Recognize named entities for several documents in one call.
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(List, String, Context)}
*/
public void recognizeEntitiesListTextWithResponse() {
    // Recognize named entities for several documents (language hint "en"), unwrapping the raw response.
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeBatchEntities(List)}
*/
public void recognizeBatchEntitiesListText() {
    // Batch inputs carry their own document ids.
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for
* {@link TextAnalyticsClient#recognizeBatchEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}
*/
public void recognizeBatchEntitiesListTextWithResponse() {
    // Batch inputs carry their own document ids; statistics are requested explicitly.
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String)}
*/
public void recognizePiiEntitiesSingleText() {
    // Recognize personally identifiable information (PII) entities in one document.
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntities("My SSN is 555-55-5555");
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        // Evaluate the optional subtype once instead of calling the getter three times.
        final String subtype = entity.getSubtype();
        System.out.printf(
            "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            subtype == null || subtype.isEmpty() ? "N/A" : subtype,
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(String, String, Context)}
*/
public void recognizePiiEntitiesSingleTextWithResponse() {
    // Recognize PII entities in one document (language hint "en"), unwrapping the raw response.
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntitiesWithResponse("My SSN is 555-55-5555", "en", Context.NONE)
            .getValue();
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        // Evaluate the optional subtype once instead of calling the getter three times.
        final String subtype = entity.getSubtype();
        System.out.printf(
            "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, score: %s.%n",
            entity.getText(),
            entity.getType(),
            subtype == null || subtype.isEmpty() ? "N/A" : subtype,
            entity.getOffset(),
            entity.getLength(),
            entity.getScore());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(List)}
*/
public void recognizePiiEntitiesListText() {
    // Recognize PII entities for several documents in one call.
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(List, String, Context)}
*/
public void recognizePiiEntitiesListTextWithResponse() {
    // Recognize PII entities for several documents (language hint "en"), unwrapping the raw response.
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeBatchPiiEntities(List)}
*/
public void recognizeBatchPiiEntitiesListText() {
    // Batch inputs carry their own document ids.
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s,"
                    + " score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for
* {@link TextAnalyticsClient#recognizeBatchPiiEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}
*/
public void recognizeBatchPiiEntitiesListTextWithResponse() {
    // Batch inputs carry their own document ids; statistics are requested explicitly.
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            // Evaluate the optional subtype once instead of calling the getter three times.
            final String subtype = entity.getSubtype();
            System.out.printf(
                "Recognized PII entity: %s, entity type: %s, entity subtype: %s, offset: %s, length: %s, "
                    + "score: %s.%n",
                entity.getText(),
                entity.getType(),
                subtype == null || subtype.isEmpty() ? "N/A" : subtype,
                entity.getOffset(),
                entity.getLength(),
                entity.getScore());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String)}
*/
public void recognizeLinkedEntitiesSingleText() {
    // Recognize linked entities in one document and print each one.
    final RecognizeLinkedEntitiesResult result =
        textAnalyticsClient.recognizeLinkedEntities("Old Faithful is a geyser at Yellowstone Park.");
    for (LinkedEntity linkedEntity : result.getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(String, String, Context)}
*/
public void recognizeLinkedEntitiesSingleTextWithResponse() {
    // Recognize linked entities in one document (language hint "en"), unwrapping the raw response.
    final RecognizeLinkedEntitiesResult result = textAnalyticsClient.recognizeLinkedEntitiesWithResponse(
        "Old Faithful is a geyser at Yellowstone Park.", "en", Context.NONE).getValue();
    for (LinkedEntity linkedEntity : result.getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(List)}
*/
public void recognizeLinkedEntitiesListText() {
    // Recognize linked entities for several documents in one call.
    final List<String> inputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds.");
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> results =
        textAnalyticsClient.recognizeLinkedEntities(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult result : results) {
        for (LinkedEntity linkedEntity : result.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(List, String, Context)}
*/
public void recognizeLinkedEntitiesListTextWithResponse() {
    // Recognize linked entities for several documents (language hint "en"), unwrapping the raw response.
    final List<String> inputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds.");
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> results =
        textAnalyticsClient.recognizeLinkedEntitiesWithResponse(inputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult result : results) {
        for (LinkedEntity linkedEntity : result.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient#recognizeBatchLinkedEntities(List)}
*/
public void recognizeBatchLinkedEntitiesListText() {
    // Batch inputs carry their own ids and languages.
    final List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
        new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
    );
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> results =
        textAnalyticsClient.recognizeBatchLinkedEntities(inputs);
    final TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        stats.getDocumentCount(), stats.getTransactionCount(), stats.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult result : results) {
        for (LinkedEntity linkedEntity : result.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
* Code snippet for
* {@link TextAnalyticsClient#recognizeBatchLinkedEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}
*/
public void recognizeBatchLinkedEntitiesListTextWithResponse() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
);
final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
textAnalyticsClient.recognizeBatchLinkedEntitiesWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
System.out.printf(
"A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
batchStatistics.getDocumentCount(),
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
}
}
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String)}.
 * Extracts key phrases from a single piece of text.
 */
public void extractKeyPhrasesSingleText() {
    final List<String> keyPhrases =
        textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.").getKeyPhrases();
    for (String keyPhrase : keyPhrases) {
        System.out.printf("Recognized phrases: %s.%n", keyPhrase);
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(String, String, Context)}.
 * Extracts key phrases from a single piece of text with a language hint and a {@link Context}.
 */
public void extractKeyPhrasesSingleTextWithResponse() {
    final ExtractKeyPhraseResult extractKeyPhrases = textAnalyticsClient.extractKeyPhrasesWithResponse(
        "My cat might need to see a veterinarian.", "en", Context.NONE).getValue();
    for (String keyPhrases : extractKeyPhrases.getKeyPhrases()) {
        System.out.printf("Recognized phrases: %s.%n", keyPhrases);
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(List)}.
 * Extracts key phrases from a batch of plain strings and prints batch statistics.
 */
public void extractKeyPhrasesListText() {
    final List<String> textInputs = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractKeyPhrases(textInputs);
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesWithResponse(List, String, Context)}.
 * Extracts key phrases from a batch of strings with a language hint and a {@link Context}.
 */
public void extractKeyPhrasesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractKeyPhrasesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractBatchKeyPhrases(List)}.
 * Extracts key phrases from a batch of {@link TextDocumentInput} and prints batch statistics.
 */
public void extractBatchKeyPhrasesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
    );
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractBatchKeyPhrases(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#extractBatchKeyPhrasesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 * Extracts key phrases from a batch of documents with request options and a {@link Context}.
 */
public void extractBatchKeyPhrasesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
    );
    // setShowStatistics(true) requests batch statistics in the response.
    final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
        textAnalyticsClient.extractBatchKeyPhrasesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
        System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
        for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
            System.out.printf("Extracted phrases: %s.%n", keyPhrases);
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String)}.
 * Analyzes the sentiment of a single piece of text, printing document- and sentence-level scores.
 */
public void analyzeSentimentSingleText() {
    final AnalyzeSentimentResult sentimentResult =
        textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
    final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
    System.out.printf(
        "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
        documentSentiment.getTextSentimentClass(),
        documentSentiment.getPositiveScore(),
        documentSentiment.getNeutralScore(),
        documentSentiment.getNegativeScore());
    for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            textSentiment.getTextSentimentClass(),
            textSentiment.getPositiveScore(),
            textSentiment.getNeutralScore(),
            textSentiment.getNegativeScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentWithResponse(String, String, Context)}.
 * Analyzes the sentiment of a single piece of text with a language hint and a {@link Context}.
 */
public void analyzeSentimentSingleTextWithResponse() {
    final AnalyzeSentimentResult sentimentResult = textAnalyticsClient.analyzeSentimentWithResponse(
        "The hotel was dark and unclean.", "en", Context.NONE).getValue();
    final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
    System.out.printf(
        "Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
        documentSentiment.getTextSentimentClass(),
        documentSentiment.getPositiveScore(),
        documentSentiment.getNeutralScore(),
        documentSentiment.getNegativeScore());
    for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            textSentiment.getTextSentimentClass(),
            textSentiment.getPositiveScore(),
            textSentiment.getNeutralScore(),
            textSentiment.getNegativeScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(List)}.
 * Analyzes the sentiment of a batch of plain strings and prints batch statistics.
 */
public void analyzeSentimentListText() {
    final List<String> textInputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeSentiment(textInputs);
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentWithResponse(List, String, Context)}.
 * Analyzes the sentiment of a batch of strings with a language hint and a {@link Context}.
 */
public void analyzeSentimentListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeSentimentWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeBatchSentiment(List)}.
 * Analyzes the sentiment of a batch of {@link TextDocumentInput} and prints batch statistics.
 */
public void analyzeBatchSentimentListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
    );
    final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeBatchSentiment(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#analyzeBatchSentimentWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 * Analyzes the sentiment of a batch of documents with request options and a {@link Context}.
 */
public void analyzeBatchSentimentListTextWithResponse() {
    List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
    );
    // setShowStatistics(true) requests batch statistics in the response.
    final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
        textAnalyticsClient.analyzeBatchSentimentWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
    System.out.printf(
        "A batch of document statistics, document count: %s, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getDocumentCount(),
        batchStatistics.getTransactionCount(),
        batchStatistics.getValidDocumentCount());
    for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
            documentSentiment.getTextSentimentClass(),
            documentSentiment.getPositiveScore(),
            documentSentiment.getNeutralScore(),
            documentSentiment.getNegativeScore());
        for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
                sentenceSentiment.getTextSentimentClass(),
                sentenceSentiment.getPositiveScore(),
                sentenceSentiment.getNeutralScore(),
                sentenceSentiment.getNegativeScore());
        }
    }
}
} | class TextAnalyticsClientJavaDocCodeSnippets {
// Placeholder credentials for the samples; real values are supplied by the user running the snippet.
private static final String SUBSCRIPTION_KEY = null;
private static final String ENDPOINT = null;
// Shared client used by the snippet methods below.
private final TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
 * Code snippet for creating a {@link TextAnalyticsClient} with a custom {@link HttpPipeline}.
 */
public void createTextAnalyticsClientWithPipeline() {
    HttpPipeline pipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    // Local variable intentionally shadows the field; the snippet shows standalone construction.
    TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
        .pipeline(pipeline)
        .endpoint(ENDPOINT)
        .subscriptionKey(SUBSCRIPTION_KEY)
        .buildClient();
}
/**
 * Code snippet for creating a {@link TextAnalyticsClient} with a subscription key and endpoint.
 */
public void createTextAnalyticsClient() {
    // Local variable intentionally shadows the field; the snippet shows standalone construction.
    TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
        .subscriptionKey(SUBSCRIPTION_KEY)
        .endpoint(ENDPOINT)
        .buildClient();
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}.
 * Detects the language of a single piece of text.
 */
public void detectLanguageSingleText() {
    final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
    for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguageWithResponse(String, String, Context)}.
 * Detects the language of a single piece of text with a country hint and a {@link Context}.
 */
public void detectLanguageForSingleInputTextAndCountryHintWithResponse() {
    final DetectLanguageResult detectLanguageResult = textAnalyticsClient.detectLanguageWithResponse(
        "This text is in English", "US", Context.NONE).getValue();
    for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
        System.out.printf("Detected languages name: %s, ISO 6391 name: %s, score: %s.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguages(List)}.
 * Detects the language of a batch of plain strings and prints batch statistics.
 */
public void detectLanguageForListInputTexts() {
    final List<String> textInputs = Arrays.asList(
        "This is written in English",
        "Este es un document escrito en Español.");
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectLanguages(textInputs);
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguagesWithResponse(List, String, Context)}.
 * Detects the language of a batch of strings with a country hint and a {@link Context}.
 */
public void detectLanguageForListInputTextsWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "This is written in English",
        "Este es un document escrito en Español.");
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectLanguagesWithResponse(textInputs, "US", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectBatchLanguages(List)}.
 * Detects the language of a batch of {@link DetectLanguageInput} and prints batch statistics.
 */
public void detectLanguageForListDetectedLanguageInput() {
    final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectBatchLanguages(detectLanguageInputs);
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#detectBatchLanguagesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 * Detects the language of a batch of inputs with request options and a {@link Context}.
 */
public void detectLanguageForListDetectedLanguageInputWithResponse() {
    final List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un document escrito en Español.", "es")
    );
    // setShowStatistics(true) requests batch statistics in the response.
    final DocumentResultCollection<DetectLanguageResult> detectLanguageResults =
        textAnalyticsClient.detectBatchLanguagesWithResponse(detectLanguageInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = detectLanguageResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (DetectLanguageResult detectLanguageResult : detectLanguageResults) {
        System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
        for (DetectedLanguage detectedLanguage : detectLanguageResult.getDetectedLanguages()) {
            System.out.printf("Detected language: %s, ISO 6391 name: %s, score: %s.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}.
 */
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(String, String, Context)}.
 * Recognizes named entities in a single piece of text with a language hint and a {@link Context}.
 */
public void recognizeEntitiesSingleTextWithResponse() {
    final RecognizeEntitiesResult recognizeEntitiesResult = textAnalyticsClient.recognizeEntitiesWithResponse(
        "Satya Nadella is the CEO of Microsoft", "en", Context.NONE).getValue();
    for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
        System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
            entity.getText(), entity.getType(), entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(List)}.
 * Recognizes named entities in a batch of plain strings and prints batch statistics.
 */
public void recognizeEntitiesListText() {
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesWithResponse(List, String, Context)}.
 * Recognizes named entities in a batch of strings with a language hint and a {@link Context}.
 */
public void recognizeEntitiesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeBatchEntities(List)}.
 * Recognizes named entities in a batch of {@link TextDocumentInput} and prints batch statistics.
 */
public void recognizeBatchEntitiesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#recognizeBatchEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 * Recognizes named entities in a batch of documents with request options and a {@link Context}.
 */
public void recognizeBatchEntitiesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."),
        new TextDocumentInput("1", "I work at Microsoft."));
    // setShowStatistics(true) requests batch statistics in the response.
    final DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults =
        textAnalyticsClient.recognizeBatchEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) {
        for (NamedEntity entity : recognizeEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String)}.
 * Recognizes personally identifiable information (PII) entities in a single piece of text.
 */
public void recognizePiiEntitiesSingleText() {
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntities("My SSN is 555-55-5555");
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
            entity.getText(), entity.getType(), entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(String, String, Context)}.
 * Recognizes PII entities in a single piece of text with a language hint and a {@link Context}.
 */
public void recognizePiiEntitiesSingleTextWithResponse() {
    final RecognizePiiEntitiesResult recognizePiiEntitiesResult =
        textAnalyticsClient.recognizePiiEntitiesWithResponse("My SSN is 555-55-5555", "en", Context.NONE)
            .getValue();
    for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
        System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
            entity.getText(), entity.getType(), entity.getScore());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(List)}.
 * Recognizes PII entities in a batch of plain strings and prints batch statistics.
 */
public void recognizePiiEntitiesListText() {
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntitiesWithResponse(List, String, Context)}.
 * Recognizes PII entities in a batch of strings with a language hint and a {@link Context}.
 */
public void recognizePiiEntitiesListTextWithResponse() {
    final List<String> textInputs = Arrays.asList("My SSN is 555-55-5555", "Visa card 4111 1111 1111 1111");
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizePiiEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeBatchPiiEntities(List)}.
 * Recognizes PII entities in a batch of {@link TextDocumentInput} and prints batch statistics.
 */
public void recognizeBatchPiiEntitiesListText() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntities(textDocumentInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsClient#recognizeBatchPiiEntitiesWithResponse(List, TextAnalyticsRequestOptions, Context)}.
 * Recognizes PII entities in a batch of documents with request options and a {@link Context}.
 */
public void recognizeBatchPiiEntitiesListTextWithResponse() {
    final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 555-55-5555"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111"));
    // setShowStatistics(true) requests batch statistics in the response.
    final DocumentResultCollection<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
        textAnalyticsClient.recognizeBatchPiiEntitiesWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
    final TextDocumentBatchStatistics batchStatistics = recognizePiiEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizePiiEntitiesResult recognizePiiEntitiesResult : recognizePiiEntitiesResults) {
        for (NamedEntity entity : recognizePiiEntitiesResult.getNamedEntities()) {
            System.out.printf("Recognized PII entity: %s, entity type: %s, score: %s.%n",
                entity.getText(), entity.getType(), entity.getScore());
        }
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String)}.
 * Recognizes linked entities in a single piece of text.
 */
public void recognizeLinkedEntitiesSingleText() {
    for (LinkedEntity linkedEntity : textAnalyticsClient.recognizeLinkedEntities(
        "Old Faithful is a geyser at Yellowstone Park.").getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesWithResponse(String, String, Context)}.
 * Recognizes linked entities in a single piece of text with a language hint and a {@link Context}.
 */
public void recognizeLinkedEntitiesSingleTextWithResponse() {
    final RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult =
        textAnalyticsClient.recognizeLinkedEntitiesWithResponse(
            "Old Faithful is a geyser at Yellowstone Park.", "en", Context.NONE).getValue();
    for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
        System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
            linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
    }
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(List)}.
 * Recognizes linked entities in a batch of plain strings and prints batch statistics.
 */
public void recognizeLinkedEntitiesListText() {
    final List<String> textInputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds.");
    final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
        textAnalyticsClient.recognizeLinkedEntities(textInputs);
    final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
    System.out.printf(
        "A batch of document statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
        for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
            System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
        }
    }
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesListTextWithResponse() {
final List<String> textInputs = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds.");
final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
textAnalyticsClient.recognizeLinkedEntitiesWithResponse(textInputs, "en", Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchLinkedEntitiesListText() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
);
final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
textAnalyticsClient.recognizeBatchLinkedEntities(textDocumentInputs);
final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
}
}
}
/**
* Code snippet for
* {@link TextAnalyticsClient
*/
public void recognizeBatchLinkedEntitiesListTextWithResponse() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en")
);
final DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
textAnalyticsClient.recognizeBatchLinkedEntitiesWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) {
for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getLinkedEntities()) {
System.out.printf("Recognized linked entity: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getUrl(), linkedEntity.getDataSource());
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesSingleText() {
final List<String> keyPhrases =
textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.").getKeyPhrases();
for (String keyPhrase : keyPhrases) {
System.out.printf("Recognized phrases: %s.%n", keyPhrase);
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesSingleTextWithResponse() {
final ExtractKeyPhraseResult extractKeyPhrases = textAnalyticsClient.extractKeyPhrasesWithResponse(
"My cat might need to see a veterinarian.", "en", Context.NONE).getValue();
for (String keyPhrases : extractKeyPhrases.getKeyPhrases()) {
System.out.printf("Recognized phrases: %s.%n", keyPhrases);
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesListText() {
final List<String> textInputs = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
textAnalyticsClient.extractKeyPhrases(textInputs);
final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("Extracted phrases: %s.%n", keyPhrases);
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesListTextWithResponse() {
final List<String> textInputs = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
textAnalyticsClient.extractKeyPhrasesWithResponse(textInputs, "en", Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("Extracted phrases: %s.%n", keyPhrases);
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractBatchKeyPhrasesListText() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
);
final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
textAnalyticsClient.extractBatchKeyPhrases(textDocumentInputs);
final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("Extracted phrases: %s.%n", keyPhrases);
}
}
}
/**
* Code snippet for
* {@link TextAnalyticsClient
*/
public void extractBatchKeyPhrasesListTextWithResponse() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.", "en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en")
);
final DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults =
textAnalyticsClient.extractBatchKeyPhrasesWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
for (String keyPhrases : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("Extracted phrases: %s.%n", keyPhrases);
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentSingleText() {
final AnalyzeSentimentResult sentimentResult =
textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
textSentiment.getTextSentimentClass(),
textSentiment.getPositiveScore(),
textSentiment.getNeutralScore(),
textSentiment.getNegativeScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentSingleTextWithResponse() {
final AnalyzeSentimentResult sentimentResult = textAnalyticsClient.analyzeSentimentWithResponse(
"The hotel was dark and unclean.", "en", Context.NONE).getValue();
final TextSentiment documentSentiment = sentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment textSentiment : sentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
textSentiment.getTextSentimentClass(),
textSentiment.getPositiveScore(),
textSentiment.getNeutralScore(),
textSentiment.getNegativeScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentListText() {
final List<String> textInputs = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
textAnalyticsClient.analyzeSentiment(textInputs);
final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
sentenceSentiment.getTextSentimentClass(),
sentenceSentiment.getPositiveScore(),
sentenceSentiment.getNeutralScore(),
sentenceSentiment.getNegativeScore());
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentListTextWithResponse() {
final List<String> textInputs = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
textAnalyticsClient.analyzeSentimentWithResponse(textInputs, "en", Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
sentenceSentiment.getTextSentimentClass(),
sentenceSentiment.getPositiveScore(),
sentenceSentiment.getNeutralScore(),
sentenceSentiment.getNegativeScore());
}
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeBatchSentimentListText() {
final List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
);
final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
textAnalyticsClient.analyzeBatchSentiment(textDocumentInputs);
final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
sentenceSentiment.getTextSentimentClass(),
sentenceSentiment.getPositiveScore(),
sentenceSentiment.getNeutralScore(),
sentenceSentiment.getNegativeScore());
}
}
}
/**
* Code snippet for
* {@link TextAnalyticsClient
*/
public void analyzeBatchSentimentListTextWithResponse() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en")
);
final DocumentResultCollection<AnalyzeSentimentResult> analyzedBatchResult =
textAnalyticsClient.analyzeBatchSentimentWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setShowStatistics(true), Context.NONE).getValue();
final TextDocumentBatchStatistics batchStatistics = analyzedBatchResult.getStatistics();
System.out.printf(
"A batch of document statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (AnalyzeSentimentResult analyzeSentimentResult : analyzedBatchResult) {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
final TextSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
documentSentiment.getTextSentimentClass(),
documentSentiment.getPositiveScore(),
documentSentiment.getNeutralScore(),
documentSentiment.getNegativeScore());
for (TextSentiment sentenceSentiment : analyzeSentimentResult.getSentenceSentiments()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %s, neutral score: %s, negative score: %s.%n",
sentenceSentiment.getTextSentimentClass(),
sentenceSentiment.getPositiveScore(),
sentenceSentiment.getNeutralScore(),
sentenceSentiment.getNegativeScore());
}
}
}
} |
susceptible to SQL injection attack. | public Mono<Integer> readMinThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> Integer.parseInt(cosmosOfferResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.OFFER_MIN_THROUGHPUT)));
} | + cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions()) | public Mono<Integer> readMinThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers(
new SqlQuerySpec("select * from c where c.offerResourceId = @OFFER_RESOURCE_ID",
new SqlParameterList(new SqlParameter("@OFFER_RESOURCE_ID", cosmosContainerResponse.resourceSettings().resourceId()))), new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> Integer.parseInt(cosmosOfferResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.OFFER_MIN_THROUGHPUT)));
} | class CosmosContainer {
private CosmosDatabase database;
private String id;
private CosmosScripts scripts;
CosmosContainer(String id, CosmosDatabase database) {
this.id = id;
this.database = database;
}
/**
* Get the id of the {@link CosmosContainer}
*
* @return the id of the {@link CosmosContainer}
*/
public String id() {
return id;
}
/**
* Set the id of the {@link CosmosContainer}
*
* @param id the id of the {@link CosmosContainer}
* @return the same {@link CosmosContainer} that had the id set
*/
CosmosContainer id(String id) {
this.id = id;
return this;
}
/**
* Reads the document container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read() {
return read(new CosmosContainerRequestOptions());
}
/**
* Reads the document container by the container link.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @param options The cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().readCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted database. In case of failure the {@link Mono} will error.
*
* @param options the request options.
* @return an {@link Mono} containing the single cosmos container response for
* the deleted database or an error.
*/
public Mono<CosmosContainerResponse> delete(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().deleteCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response for
* the deleted container or an error.
*/
public Mono<CosmosContainerResponse> delete() {
return delete(new CosmosContainerRequestOptions());
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings) {
return replace(containerSettings, null);
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @param options the cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings,
CosmosContainerRequestOptions options) {
validateResource(containerSettings);
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper()
.replaceCollection(containerSettings.getV2Collection(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/* CosmosItem operations */
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item) {
return createItem(item, new CosmosItemRequestOptions());
}
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return database.getDocClientWrapper()
.createDocument(getLink(), CosmosItemProperties.fromObject(item), requestOptions, true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Upserts an item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item) {
return upsertItem(item, null);
}
/**
* Upserts a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return this.getDatabase().getDocClientWrapper()
.upsertDocument(this.getLink(), CosmosItemProperties.fromObject(item), options.toRequestOptions(), true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Reads all cosmos items in the container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems() {
return readAllItems(new FeedOptions());
}
/**
* Reads all cosmos items in a container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems(FeedOptions options) {
return getDatabase().getDocClientWrapper().readDocuments(getLink(), options).map(
response -> BridgeInternal.createFeedResponse(CosmosItemProperties.getFromV2Results(response.results()),
response.responseHeaders()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query) {
return queryItems(new SqlQuerySpec(query), null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query, FeedOptions options) {
return queryItems(new SqlQuerySpec(query), options);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec) {
return queryItems(querySpec, null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec, FeedOptions options) {
return getDatabase().getDocClientWrapper().queryDocuments(getLink(), querySpec, options)
.map(response -> BridgeInternal.createFeedResponseWithQueryMetrics(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(),
response.queryMetrics()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param changeFeedOptions the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryChangeFeedItems(ChangeFeedOptions changeFeedOptions) {
return getDatabase().getDocClientWrapper().queryDocumentChangeFeed(getLink(), changeFeedOptions)
.map(response -> new FeedResponse<CosmosItemProperties>(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(), false));
}
/**
* Gets a CosmosItem object without making a service call
*
* @param id id of the item
* @param partitionKey the partition key
* @return a cosmos item
*/
public CosmosItem getItem(String id, Object partitionKey) {
return new CosmosItem(id, partitionKey, this);
}
public CosmosScripts getScripts() {
if (this.scripts == null) {
this.scripts = new CosmosScripts(this);
}
return this.scripts;
}
/**
 * Lists all the conflicts in the container.
 *
 * @param options the feed options
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained conflicts or an error.
 */
public Flux<FeedResponse<CosmosConflictProperties>> readAllConflicts(FeedOptions options) {
    return database
        .getDocClientWrapper()
        .readConflicts(getLink(), options)
        .map(page -> BridgeInternal.createFeedResponse(
            CosmosConflictProperties.getFromV2Results(page.results()),
            page.responseHeaders()));
}
/**
 * Queries all the conflicts in the container.
 *
 * Convenience overload that delegates to
 * {@link #queryConflicts(String, FeedOptions)} with default (null) options.
 *
 * @param query the query
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained conflicts or an error.
 */
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query) {
    return queryConflicts(query, null);
}
/**
 * Queries all the conflicts in the container.
 *
 * @param query the query
 * @param options the feed options
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained conflicts or an error.
 */
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query, FeedOptions options) {
    return database
        .getDocClientWrapper()
        .queryConflicts(getLink(), query, options)
        .map(page -> BridgeInternal.createFeedResponse(
            CosmosConflictProperties.getFromV2Results(page.results()),
            page.responseHeaders()));
}
/**
 * Gets a {@link CosmosConflict} reference for the given id without making a
 * service call; the conflict is not checked to exist on the service.
 *
 * @param id id of the cosmos conflict
 * @return a cosmos conflict
 */
public CosmosConflict getConflict(String id) {
    return new CosmosConflict(id, this);
}
/**
 * Gets the throughput provisioned for the container, expressed in Request
 * Units per second. The container is read to resolve its resource id, the
 * matching offer is queried, then read to extract its throughput.
 *
 * @return a {@link Mono} containing the throughput or an error.
 */
public Mono<Integer> readProvisionedThroughput() {
    return this.read()
        .flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
            // Parameterized query instead of string concatenation: consistent with
            // readMinThroughput and robust against special characters in the id.
            .queryOffers(new SqlQuerySpec(
                    "select * from c where c.offerResourceId = @OFFER_RESOURCE_ID",
                    new SqlParameterList(new SqlParameter("@OFFER_RESOURCE_ID",
                        cosmosContainerResponse.resourceSettings().resourceId()))),
                new FeedOptions())
            .single())
        .flatMap(offerFeedResponse -> {
            if (offerFeedResponse.results().isEmpty()) {
                // No offer exists for this container, so there is no dedicated
                // throughput to report.
                return Mono.error(BridgeInternal.createCosmosClientException(
                    HttpConstants.StatusCodes.BADREQUEST, "No offers found for the resource"));
            }
            return database.getDocClientWrapper()
                .readOffer(offerFeedResponse.results().get(0).selfLink())
                .single();
        })
        .map(cosmosOfferResponse -> cosmosOfferResponse.getResource().getThroughput());
}
/**
* Gets the min throughput to which this container can be scaled down to
*
* @return a {@link Mono} containing min throughput or an error.
*/
/**
 * Sets throughput provisioned for a container in measurement of
 * Requests-per-Unit in the Azure Cosmos service.
 *
 * @param requestUnitsPerSecond the cosmos container throughput, expressed in
 *        Request Units per second
 * @return a {@link Mono} containing the replaced throughput or an error.
 */
public Mono<Integer> replaceProvisionedThroughput(int requestUnitsPerSecond) {
    return this.read()
        .flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
            // Parameterized query instead of string concatenation: consistent with
            // readMinThroughput and robust against special characters in the id.
            .queryOffers(new SqlQuerySpec(
                    "select * from c where c.offerResourceId = @OFFER_RESOURCE_ID",
                    new SqlParameterList(new SqlParameter("@OFFER_RESOURCE_ID",
                        cosmosContainerResponse.resourceSettings().resourceId()))),
                new FeedOptions())
            .single())
        .flatMap(offerFeedResponse -> {
            if (offerFeedResponse.results().isEmpty()) {
                return Mono.error(BridgeInternal.createCosmosClientException(
                    HttpConstants.StatusCodes.BADREQUEST, "No offers found for the resource"));
            }
            // Mutate the existing offer's throughput and persist the replacement.
            Offer offer = offerFeedResponse.results().get(0);
            offer.setThroughput(requestUnitsPerSecond);
            return database.getDocClientWrapper().replaceOffer(offer).single();
        })
        .map(offerResourceResponse -> offerResourceResponse.getResource().getThroughput());
}
/**
 * Gets the parent Database.
 *
 * @return the {@link CosmosDatabase} this container belongs to
 */
public CosmosDatabase getDatabase() {
    return database;
}
// Path segment for collections, used when composing this container's resource link.
String URIPathSegment() {
    return Paths.COLLECTIONS_PATH_SEGMENT;
}
// Resource link of the parent database; prefix of this container's own link.
String parentLink() {
    return database.getLink();
}
// Builds this container's resource link: <parentDatabaseLink>/<collectionsSegment>/<id>.
String getLink() {
    return parentLink() + "/" + URIPathSegment() + "/" + id();
}
} | class CosmosContainer {
// Parent database this container belongs to.
private CosmosDatabase database;
// Id (name) of the container within the database.
private String id;
// Lazily initialized by getScripts().
private CosmosScripts scripts;

// Package-private: instances are handed out by CosmosDatabase rather than
// constructed directly by callers.
CosmosContainer(String id, CosmosDatabase database) {
    this.id = id;
    this.database = database;
}
/**
 * Get the id of the {@link CosmosContainer}.
 *
 * @return the id of the {@link CosmosContainer}
 */
public String id() {
    return id;
}
/**
 * Set the id of the {@link CosmosContainer}.
 *
 * Package-private: outside callers obtain containers with a fixed id.
 *
 * @param id the id of the {@link CosmosContainer}
 * @return the same {@link CosmosContainer} that had the id set
 */
CosmosContainer id(String id) {
    this.id = id;
    return this;
}
/**
 * Reads the document container, using default request options.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response with
 * the read container. In case of failure the {@link Mono} will error.
 *
 * @return a {@link Mono} containing the single cosmos container response with
 *         the read container or an error.
 */
public Mono<CosmosContainerResponse> read() {
    return read(new CosmosContainerRequestOptions());
}
/**
 * Reads the document container by the container link.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response with
 * the read container. In case of failure the {@link Mono} will error.
 *
 * @param options the cosmos container request options; {@code null} means defaults.
 * @return a {@link Mono} containing the single cosmos container response with
 *         the read container or an error.
 */
public Mono<CosmosContainerResponse> read(CosmosContainerRequestOptions options) {
    CosmosContainerRequestOptions requestOptions =
        options != null ? options : new CosmosContainerRequestOptions();
    return database.getDocClientWrapper()
        .readCollection(getLink(), requestOptions.toRequestOptions())
        .map(collectionResponse -> new CosmosContainerResponse(collectionResponse, database))
        .single();
}
/**
 * Deletes the item container.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response for
 * the deleted container. In case of failure the {@link Mono} will error.
 *
 * @param options the request options; {@code null} means defaults.
 * @return a {@link Mono} containing the single cosmos container response for
 *         the deleted container or an error.
 */
public Mono<CosmosContainerResponse> delete(CosmosContainerRequestOptions options) {
    CosmosContainerRequestOptions requestOptions =
        options != null ? options : new CosmosContainerRequestOptions();
    return database.getDocClientWrapper()
        .deleteCollection(getLink(), requestOptions.toRequestOptions())
        .map(collectionResponse -> new CosmosContainerResponse(collectionResponse, database))
        .single();
}
/**
 * Deletes the item container, using default request options.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response for the
 * deleted container. In case of failure the {@link Mono} will error.
 *
 * @return a {@link Mono} containing the single cosmos container response for
 *         the deleted container or an error.
 */
public Mono<CosmosContainerResponse> delete() {
    return delete(new CosmosContainerRequestOptions());
}
/**
 * Replaces a document container, using default request options.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response with
 * the replaced document container. In case of failure the {@link Mono} will
 * error.
 *
 * @param containerSettings the item container properties
 * @return a {@link Mono} containing the single cosmos container response with
 *         the replaced document container or an error.
 */
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings) {
    return replace(containerSettings, null);
}
/**
 * Replaces a document container.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single cosmos container response with
 * the replaced document container. In case of failure the {@link Mono} will
 * error.
 *
 * @param containerSettings the item container properties
 * @param options the cosmos container request options; {@code null} means defaults.
 * @return a {@link Mono} containing the single cosmos container response with
 *         the replaced document container or an error.
 */
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings,
        CosmosContainerRequestOptions options) {
    validateResource(containerSettings);
    CosmosContainerRequestOptions requestOptions =
        options != null ? options : new CosmosContainerRequestOptions();
    return database.getDocClientWrapper()
        .replaceCollection(containerSettings.getV2Collection(), requestOptions.toRequestOptions())
        .map(collectionResponse -> new CosmosContainerResponse(collectionResponse, database))
        .single();
}
/* CosmosItem operations */
/**
 * Creates a cosmos item, using default request options.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single resource response with the
 * created cosmos item. In case of failure the {@link Mono} will error.
 *
 * @param item the cosmos item represented as a POJO or cosmos item object.
 * @return a {@link Mono} containing the single resource response with the
 *         created cosmos item or an error.
 */
public Mono<CosmosItemResponse> createItem(Object item) {
    return createItem(item, new CosmosItemRequestOptions());
}
/**
 * Creates a cosmos item.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single resource response with the
 * created cosmos item. In case of failure the {@link Mono} will error.
 *
 * @param item the cosmos item represented as a POJO or cosmos item object.
 * @param options the request options; {@code null} means defaults.
 * @return a {@link Mono} containing the single resource response with the
 *         created cosmos item or an error.
 */
public Mono<CosmosItemResponse> createItem(Object item, CosmosItemRequestOptions options) {
    CosmosItemRequestOptions itemOptions =
        options != null ? options : new CosmosItemRequestOptions();
    // Convert once and reuse for both the request and the response partition key.
    RequestOptions requestOptions = itemOptions.toRequestOptions();
    return database.getDocClientWrapper()
        .createDocument(getLink(), CosmosItemProperties.fromObject(item), requestOptions, true)
        .map(documentResponse ->
            new CosmosItemResponse(documentResponse, requestOptions.getPartitionKey(), this))
        .single();
}
/**
 * Upserts an item, using default request options.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single resource response with the
 * upserted item. In case of failure the {@link Mono} will error.
 *
 * @param item the item represented as a POJO or Item object to upsert.
 * @return a {@link Mono} containing the single resource response with the
 *         upserted document or an error.
 */
public Mono<CosmosItemResponse> upsertItem(Object item) {
    return upsertItem(item, null);
}
/**
 * Upserts a cosmos item.
 *
 * After subscription the operation will be performed. The {@link Mono} upon
 * successful completion will contain a single resource response with the
 * upserted item. In case of failure the {@link Mono} will error.
 *
 * @param item the item represented as a POJO or Item object to upsert.
 * @param options the request options; {@code null} means defaults.
 * @return a {@link Mono} containing the single resource response with the
 *         upserted document or an error.
 */
public Mono<CosmosItemResponse> upsertItem(Object item, CosmosItemRequestOptions options) {
    if (options == null) {
        options = new CosmosItemRequestOptions();
    }
    RequestOptions requestOptions = options.toRequestOptions();
    // Reuse the single converted RequestOptions for both the upsert call and the
    // response's partition key. The original converted the options twice, so the
    // request and the response were built from two distinct RequestOptions
    // instances — wasteful, and inconsistent with createItem.
    return this.getDatabase().getDocClientWrapper()
        .upsertDocument(this.getLink(), CosmosItemProperties.fromObject(item), requestOptions, true)
        .map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this))
        .single();
}
/**
 * Reads all cosmos items in the container, using default feed options.
 *
 * After subscription the operation will be performed. The {@link Flux} will
 * contain one or several feed response of the read cosmos items. In case of
 * failure the {@link Flux} will error.
 *
 * @return a {@link Flux} containing one or several feed response pages of the
 *         read cosmos items or an error.
 */
public Flux<FeedResponse<CosmosItemProperties>> readAllItems() {
    return readAllItems(new FeedOptions());
}
/**
 * Reads all cosmos items in a container.
 *
 * After subscription the operation will be performed. The {@link Flux} will
 * contain one or several feed response of the read cosmos items. In case of
 * failure the {@link Flux} will error.
 *
 * @param options the feed options.
 * @return a {@link Flux} containing one or several feed response pages of the
 *         read cosmos items or an error.
 */
public Flux<FeedResponse<CosmosItemProperties>> readAllItems(FeedOptions options) {
    return getDatabase()
        .getDocClientWrapper()
        .readDocuments(getLink(), options)
        .map(page -> BridgeInternal.createFeedResponse(
            CosmosItemProperties.getFromV2Results(page.results()),
            page.responseHeaders()));
}
/**
 * Queries items in the container.
 *
 * After subscription the operation will be performed. The {@link Flux} will
 * contain one or several feed response of the obtained items. In case of
 * failure the {@link Flux} will error.
 *
 * @param query the query.
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained items or an error.
 */
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query) {
    return queryItems(new SqlQuerySpec(query), null);
}
/**
 * Queries items in the container.
 *
 * After subscription the operation will be performed. The {@link Flux} will
 * contain one or several feed response of the obtained items. In case of
 * failure the {@link Flux} will error.
 *
 * @param query the query.
 * @param options the feed options.
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained items or an error.
 */
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query, FeedOptions options) {
    return queryItems(new SqlQuerySpec(query), options);
}
/**
 * Queries items in the container, using default feed options.
 *
 * After subscription the operation will be performed. The {@link Flux} will
 * contain one or several feed response of the obtained items. In case of
 * failure the {@link Flux} will error.
 *
 * @param querySpec the SQL query specification.
 * @return a {@link Flux} containing one or several feed response pages of the
 *         obtained items or an error.
 */
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec) {
    return queryItems(querySpec, null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec, FeedOptions options) {
return getDatabase().getDocClientWrapper().queryDocuments(getLink(), querySpec, options)
.map(response -> BridgeInternal.createFeedResponseWithQueryMetrics(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(),
response.queryMetrics()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param changeFeedOptions the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryChangeFeedItems(ChangeFeedOptions changeFeedOptions) {
return getDatabase().getDocClientWrapper().queryDocumentChangeFeed(getLink(), changeFeedOptions)
.map(response -> new FeedResponse<CosmosItemProperties>(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(), false));
}
/**
* Gets a CosmosItem object without making a service call
*
* @param id id of the item
* @param partitionKey the partition key
* @return a cosmos item
*/
public CosmosItem getItem(String id, Object partitionKey) {
return new CosmosItem(id, partitionKey, this);
}
public CosmosScripts getScripts() {
if (this.scripts == null) {
this.scripts = new CosmosScripts(this);
}
return this.scripts;
}
/**
* Lists all the conflicts in the container
*
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> readAllConflicts(FeedOptions options) {
return database.getDocClientWrapper().readConflicts(getLink(), options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query) {
return queryConflicts(query, null);
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query, FeedOptions options) {
return database.getDocClientWrapper().queryConflicts(getLink(), query, options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Gets a CosmosConflict object without making a service call
*
* @param id id of the cosmos conflict
* @return a cosmos conflict
*/
public CosmosConflict getConflict(String id) {
return new CosmosConflict(id, this);
}
/**
* Gets the throughput of the container
*
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> readProvisionedThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> cosmosOfferResponse.getResource().getThroughput());
}
/**
* Gets the min throughput to which this container can be scaled down to
*
* @return a {@link Mono} containing min throughput or an error.
*/
/**
* Sets throughput provisioned for a container in measurement of
* Requests-per-Unit in the Azure Cosmos service.
*
* @param requestUnitsPerSecond the cosmos container throughput, expressed in
* Request Units per second
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> replaceProvisionedThroughput(int requestUnitsPerSecond) {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
Offer offer = offerFeedResponse.results().get(0);
offer.setThroughput(requestUnitsPerSecond);
return database.getDocClientWrapper().replaceOffer(offer).single();
}).map(offerResourceResponse -> offerResourceResponse.getResource().getThroughput());
}
/**
* Gets the parent Database
*
* @return the {@link CosmosDatabase}
*/
public CosmosDatabase getDatabase() {
return database;
}
String URIPathSegment() {
return Paths.COLLECTIONS_PATH_SEGMENT;
}
String parentLink() {
return database.getLink();
}
String getLink() {
StringBuilder builder = new StringBuilder();
builder.append(parentLink());
builder.append("/");
builder.append(URIPathSegment());
builder.append("/");
builder.append(id());
return builder.toString();
}
} |
Fixed in latest iteration | public Mono<Integer> readMinThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> Integer.parseInt(cosmosOfferResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.OFFER_MIN_THROUGHPUT)));
} | + cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions()) | public Mono<Integer> readMinThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers(
new SqlQuerySpec("select * from c where c.offerResourceId = @OFFER_RESOURCE_ID",
new SqlParameterList(new SqlParameter("@OFFER_RESOURCE_ID", cosmosContainerResponse.resourceSettings().resourceId()))), new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> Integer.parseInt(cosmosOfferResponse.getResponseHeaders().get(HttpConstants.HttpHeaders.OFFER_MIN_THROUGHPUT)));
} | class CosmosContainer {
private CosmosDatabase database;
private String id;
private CosmosScripts scripts;
CosmosContainer(String id, CosmosDatabase database) {
this.id = id;
this.database = database;
}
/**
* Get the id of the {@link CosmosContainer}
*
* @return the id of the {@link CosmosContainer}
*/
public String id() {
return id;
}
/**
* Set the id of the {@link CosmosContainer}
*
* @param id the id of the {@link CosmosContainer}
* @return the same {@link CosmosContainer} that had the id set
*/
CosmosContainer id(String id) {
this.id = id;
return this;
}
/**
* Reads the document container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read() {
return read(new CosmosContainerRequestOptions());
}
/**
* Reads the document container by the container link.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @param options The cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().readCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted database. In case of failure the {@link Mono} will error.
*
* @param options the request options.
* @return an {@link Mono} containing the single cosmos container response for
* the deleted database or an error.
*/
public Mono<CosmosContainerResponse> delete(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().deleteCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response for
* the deleted container or an error.
*/
public Mono<CosmosContainerResponse> delete() {
return delete(new CosmosContainerRequestOptions());
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings) {
return replace(containerSettings, null);
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @param options the cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings,
CosmosContainerRequestOptions options) {
validateResource(containerSettings);
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper()
.replaceCollection(containerSettings.getV2Collection(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/* CosmosItem operations */
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item) {
return createItem(item, new CosmosItemRequestOptions());
}
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return database.getDocClientWrapper()
.createDocument(getLink(), CosmosItemProperties.fromObject(item), requestOptions, true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Upserts an item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item) {
return upsertItem(item, null);
}
/**
* Upserts a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return this.getDatabase().getDocClientWrapper()
.upsertDocument(this.getLink(), CosmosItemProperties.fromObject(item), options.toRequestOptions(), true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Reads all cosmos items in the container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems() {
return readAllItems(new FeedOptions());
}
/**
* Reads all cosmos items in a container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems(FeedOptions options) {
return getDatabase().getDocClientWrapper().readDocuments(getLink(), options).map(
response -> BridgeInternal.createFeedResponse(CosmosItemProperties.getFromV2Results(response.results()),
response.responseHeaders()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query) {
return queryItems(new SqlQuerySpec(query), null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query, FeedOptions options) {
return queryItems(new SqlQuerySpec(query), options);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec) {
return queryItems(querySpec, null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec, FeedOptions options) {
return getDatabase().getDocClientWrapper().queryDocuments(getLink(), querySpec, options)
.map(response -> BridgeInternal.createFeedResponseWithQueryMetrics(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(),
response.queryMetrics()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param changeFeedOptions the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryChangeFeedItems(ChangeFeedOptions changeFeedOptions) {
return getDatabase().getDocClientWrapper().queryDocumentChangeFeed(getLink(), changeFeedOptions)
.map(response -> new FeedResponse<CosmosItemProperties>(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(), false));
}
/**
* Gets a CosmosItem object without making a service call
*
* @param id id of the item
* @param partitionKey the partition key
* @return a cosmos item
*/
public CosmosItem getItem(String id, Object partitionKey) {
return new CosmosItem(id, partitionKey, this);
}
public CosmosScripts getScripts() {
if (this.scripts == null) {
this.scripts = new CosmosScripts(this);
}
return this.scripts;
}
/**
* Lists all the conflicts in the container
*
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> readAllConflicts(FeedOptions options) {
return database.getDocClientWrapper().readConflicts(getLink(), options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query) {
return queryConflicts(query, null);
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query, FeedOptions options) {
return database.getDocClientWrapper().queryConflicts(getLink(), query, options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Gets a CosmosConflict object without making a service call
*
* @param id id of the cosmos conflict
* @return a cosmos conflict
*/
public CosmosConflict getConflict(String id) {
return new CosmosConflict(id, this);
}
/**
* Gets the throughput of the container
*
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> readProvisionedThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> cosmosOfferResponse.getResource().getThroughput());
}
/**
* Gets the min throughput to which this container can be scaled down to
*
* @return a {@link Mono} containing min throughput or an error.
*/
/**
* Sets throughput provisioned for a container in measurement of
* Requests-per-Unit in the Azure Cosmos service.
*
* @param requestUnitsPerSecond the cosmos container throughput, expressed in
* Request Units per second
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> replaceProvisionedThroughput(int requestUnitsPerSecond) {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
Offer offer = offerFeedResponse.results().get(0);
offer.setThroughput(requestUnitsPerSecond);
return database.getDocClientWrapper().replaceOffer(offer).single();
}).map(offerResourceResponse -> offerResourceResponse.getResource().getThroughput());
}
/**
* Gets the parent Database
*
* @return the {@link CosmosDatabase}
*/
public CosmosDatabase getDatabase() {
return database;
}
String URIPathSegment() {
return Paths.COLLECTIONS_PATH_SEGMENT;
}
String parentLink() {
return database.getLink();
}
String getLink() {
StringBuilder builder = new StringBuilder();
builder.append(parentLink());
builder.append("/");
builder.append(URIPathSegment());
builder.append("/");
builder.append(id());
return builder.toString();
}
} | class CosmosContainer {
private CosmosDatabase database;
private String id;
private CosmosScripts scripts;
CosmosContainer(String id, CosmosDatabase database) {
this.id = id;
this.database = database;
}
/**
* Get the id of the {@link CosmosContainer}
*
* @return the id of the {@link CosmosContainer}
*/
public String id() {
return id;
}
/**
* Set the id of the {@link CosmosContainer}
*
* @param id the id of the {@link CosmosContainer}
* @return the same {@link CosmosContainer} that had the id set
*/
CosmosContainer id(String id) {
this.id = id;
return this;
}
/**
* Reads the document container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read() {
return read(new CosmosContainerRequestOptions());
}
/**
* Reads the document container by the container link.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the read container. In case of failure the {@link Mono} will error.
*
* @param options The cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the read container or an error.
*/
public Mono<CosmosContainerResponse> read(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().readCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted database. In case of failure the {@link Mono} will error.
*
* @param options the request options.
* @return an {@link Mono} containing the single cosmos container response for
* the deleted database or an error.
*/
public Mono<CosmosContainerResponse> delete(CosmosContainerRequestOptions options) {
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper().deleteCollection(getLink(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/**
* Deletes the item container
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response for the
* deleted container. In case of failure the {@link Mono} will error.
*
* @return an {@link Mono} containing the single cosmos container response for
* the deleted container or an error.
*/
public Mono<CosmosContainerResponse> delete() {
return delete(new CosmosContainerRequestOptions());
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings) {
return replace(containerSettings, null);
}
/**
* Replaces a document container.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single cosmos container response with
* the replaced document container. In case of failure the {@link Mono} will
* error.
*
* @param containerSettings the item container properties
* @param options the cosmos container request options.
* @return an {@link Mono} containing the single cosmos container response with
* the replaced document container or an error.
*/
public Mono<CosmosContainerResponse> replace(CosmosContainerProperties containerSettings,
CosmosContainerRequestOptions options) {
validateResource(containerSettings);
if (options == null) {
options = new CosmosContainerRequestOptions();
}
return database.getDocClientWrapper()
.replaceCollection(containerSettings.getV2Collection(), options.toRequestOptions())
.map(response -> new CosmosContainerResponse(response, database)).single();
}
/* CosmosItem operations */
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item) {
return createItem(item, new CosmosItemRequestOptions());
}
/**
* Creates a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* created cosmos item. In case of failure the {@link Mono} will error.
*
* @param item the cosmos item represented as a POJO or cosmos item object.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* created cosmos item or an error.
*/
public Mono<CosmosItemResponse> createItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return database.getDocClientWrapper()
.createDocument(getLink(), CosmosItemProperties.fromObject(item), requestOptions, true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Upserts an item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item) {
return upsertItem(item, null);
}
/**
* Upserts a cosmos item.
*
* After subscription the operation will be performed. The {@link Mono} upon
* successful completion will contain a single resource response with the
* upserted item. In case of failure the {@link Mono} will error.
*
* @param item the item represented as a POJO or Item object to upsert.
* @param options the request options.
* @return an {@link Mono} containing the single resource response with the
* upserted document or an error.
*/
public Mono<CosmosItemResponse> upsertItem(Object item, CosmosItemRequestOptions options) {
if (options == null) {
options = new CosmosItemRequestOptions();
}
RequestOptions requestOptions = options.toRequestOptions();
return this.getDatabase().getDocClientWrapper()
.upsertDocument(this.getLink(), CosmosItemProperties.fromObject(item), options.toRequestOptions(), true)
.map(response -> new CosmosItemResponse(response, requestOptions.getPartitionKey(), this)).single();
}
/**
* Reads all cosmos items in the container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems() {
return readAllItems(new FeedOptions());
}
/**
* Reads all cosmos items in a container.
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the read cosmos items. In case of
* failure the {@link Flux} will error.
*
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* read cosmos items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> readAllItems(FeedOptions options) {
return getDatabase().getDocClientWrapper().readDocuments(getLink(), options).map(
response -> BridgeInternal.createFeedResponse(CosmosItemProperties.getFromV2Results(response.results()),
response.responseHeaders()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query) {
return queryItems(new SqlQuerySpec(query), null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param query the query.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(String query, FeedOptions options) {
return queryItems(new SqlQuerySpec(query), options);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec) {
return queryItems(querySpec, null);
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryItems(SqlQuerySpec querySpec, FeedOptions options) {
return getDatabase().getDocClientWrapper().queryDocuments(getLink(), querySpec, options)
.map(response -> BridgeInternal.createFeedResponseWithQueryMetrics(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(),
response.queryMetrics()));
}
/**
* Query for documents in a items in a container
*
* After subscription the operation will be performed. The {@link Flux} will
* contain one or several feed response of the obtained items. In case of
* failure the {@link Flux} will error.
*
* @param changeFeedOptions the feed options.
* @return an {@link Flux} containing one or several feed response pages of the
* obtained items or an error.
*/
public Flux<FeedResponse<CosmosItemProperties>> queryChangeFeedItems(ChangeFeedOptions changeFeedOptions) {
return getDatabase().getDocClientWrapper().queryDocumentChangeFeed(getLink(), changeFeedOptions)
.map(response -> new FeedResponse<CosmosItemProperties>(
CosmosItemProperties.getFromV2Results(response.results()), response.responseHeaders(), false));
}
/**
* Gets a CosmosItem object without making a service call
*
* @param id id of the item
* @param partitionKey the partition key
* @return a cosmos item
*/
public CosmosItem getItem(String id, Object partitionKey) {
return new CosmosItem(id, partitionKey, this);
}
public CosmosScripts getScripts() {
if (this.scripts == null) {
this.scripts = new CosmosScripts(this);
}
return this.scripts;
}
/**
* Lists all the conflicts in the container
*
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> readAllConflicts(FeedOptions options) {
return database.getDocClientWrapper().readConflicts(getLink(), options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query) {
return queryConflicts(query, null);
}
/**
* Queries all the conflicts in the container
*
* @param query the query
* @param options the feed options
* @return a {@link Flux} containing one or several feed response pages of the
* obtained conflicts or an error.
*/
public Flux<FeedResponse<CosmosConflictProperties>> queryConflicts(String query, FeedOptions options) {
return database.getDocClientWrapper().queryConflicts(getLink(), query, options)
.map(response -> BridgeInternal.createFeedResponse(
CosmosConflictProperties.getFromV2Results(response.results()), response.responseHeaders()));
}
/**
* Gets a CosmosConflict object without making a service call
*
* @param id id of the cosmos conflict
* @return a cosmos conflict
*/
public CosmosConflict getConflict(String id) {
return new CosmosConflict(id, this);
}
/**
* Gets the throughput of the container
*
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> readProvisionedThroughput() {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
return database.getDocClientWrapper().readOffer(offerFeedResponse.results().get(0).selfLink())
.single();
}).map(cosmosOfferResponse -> cosmosOfferResponse.getResource().getThroughput());
}
/**
* Gets the min throughput to which this container can be scaled down to
*
* @return a {@link Mono} containing min throughput or an error.
*/
/**
* Sets throughput provisioned for a container in measurement of
* Requests-per-Unit in the Azure Cosmos service.
*
* @param requestUnitsPerSecond the cosmos container throughput, expressed in
* Request Units per second
* @return a {@link Mono} containing throughput or an error.
*/
public Mono<Integer> replaceProvisionedThroughput(int requestUnitsPerSecond) {
return this.read().flatMap(cosmosContainerResponse -> database.getDocClientWrapper()
.queryOffers("select * from c where c.offerResourceId = '"
+ cosmosContainerResponse.resourceSettings().resourceId() + "'", new FeedOptions())
.single()).flatMap(offerFeedResponse -> {
if (offerFeedResponse.results().isEmpty()) {
return Mono.error(BridgeInternal.createCosmosClientException(HttpConstants.StatusCodes.BADREQUEST,
"No offers found for the resource"));
}
Offer offer = offerFeedResponse.results().get(0);
offer.setThroughput(requestUnitsPerSecond);
return database.getDocClientWrapper().replaceOffer(offer).single();
}).map(offerResourceResponse -> offerResourceResponse.getResource().getThroughput());
}
/**
* Gets the parent Database
*
* @return the {@link CosmosDatabase}
*/
public CosmosDatabase getDatabase() {
return database;
}
String URIPathSegment() {
return Paths.COLLECTIONS_PATH_SEGMENT;
}
String parentLink() {
return database.getLink();
}
String getLink() {
StringBuilder builder = new StringBuilder();
builder.append(parentLink());
builder.append("/");
builder.append(URIPathSegment());
builder.append("/");
builder.append(id());
return builder.toString();
}
} |
Parameterized type, please change this to `LatencyListener<T>`... `new LatencyListener<>(resultHandler, ...)` | void run() throws Exception {
successMeter = metricsRegistry.meter("
failureMeter = metricsRegistry.meter("
switch (configuration.getOperationType()) {
case ReadLatency:
case Mixed:
latency = metricsRegistry.timer("Latency");
break;
default:
break;
}
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
for ( i = 0; shouldContinue(startTime, i); i++) {
ResultHandler<T, Throwable> resultHandler = new ResultHandler<T, Throwable>() {
@Override
public T apply(T t, Throwable throwable) {
successMeter.mark();
concurrencyControlSemaphore.release();
if (t != null) {
assert(throwable == null);
SyncBenchmark.this.onSuccess();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
} else {
assert(throwable != null);
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
SyncBenchmark.this.onError(throwable);
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
return t;
}
};
concurrencyControlSemaphore.acquire();
final long cnt = i;
switch (configuration.getOperationType()) {
case ReadLatency:
LatencyListener latencyListener = new LatencyListener(resultHandler, latency);
latencyListener.context = latency.time();
resultHandler = latencyListener;
break;
default:
break;
}
final ResultHandler<T, Throwable> finalResultHandler = resultHandler;
CompletableFuture futureResult = CompletableFuture.supplyAsync(() -> {
try {
finalResultHandler.init();
return performWorkload(cnt);
} catch (Exception e) {
throw propagate(e);
}
}, executorService);
futureResult.handle(resultHandler);
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | LatencyListener latencyListener = new LatencyListener(resultHandler, latency); | void run() throws Exception {
successMeter = metricsRegistry.meter("
failureMeter = metricsRegistry.meter("
switch (configuration.getOperationType()) {
case ReadLatency:
case Mixed:
latency = metricsRegistry.timer("Latency");
break;
default:
break;
}
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
for ( i = 0; shouldContinue(startTime, i); i++) {
ResultHandler<T, Throwable> resultHandler = new ResultHandler<T, Throwable>() {
@Override
public T apply(T t, Throwable throwable) {
successMeter.mark();
concurrencyControlSemaphore.release();
if (t != null) {
assert(throwable == null);
SyncBenchmark.this.onSuccess();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
} else {
assert(throwable != null);
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
SyncBenchmark.this.onError(throwable);
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
return t;
}
};
concurrencyControlSemaphore.acquire();
final long cnt = i;
switch (configuration.getOperationType()) {
case ReadLatency:
LatencyListener<T> latencyListener = new LatencyListener(resultHandler, latency);
latencyListener.context = latency.time();
resultHandler = latencyListener;
break;
default:
break;
}
final ResultHandler<T, Throwable> finalResultHandler = resultHandler;
CompletableFuture<T> futureResult = CompletableFuture.supplyAsync(() -> {
try {
finalResultHandler.init();
return performWorkload(cnt);
} catch (Exception e) {
throw propagate(e);
}
}, executorService);
futureResult.handle(resultHandler);
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | class LatencyListener<T> extends ResultHandler<T, Throwable> {
private final ResultHandler<T, Throwable> baseFunction;
Timer.Context context;
LatencyListener(ResultHandler<T, Throwable> baseFunction, Timer latency) {
this.baseFunction = baseFunction;
}
protected void init() {
super.init();
context = latency.time();
}
@Override
public T apply(T o, Throwable throwable) {
context.stop();
return baseFunction.apply(o, throwable);
}
} | class LatencyListener<T> extends ResultHandler<T, Throwable> {
private final ResultHandler<T, Throwable> baseFunction;
private final Timer latencyTimer;
Timer.Context context;
LatencyListener(ResultHandler<T, Throwable> baseFunction, Timer latencyTimer) {
this.baseFunction = baseFunction;
this.latencyTimer = latencyTimer;
}
protected void init() {
super.init();
context = latencyTimer.time();
}
@Override
public T apply(T o, Throwable throwable) {
context.stop();
return baseFunction.apply(o, throwable);
}
} |
Use `try-with-resources` to ensure the resource is always closed. | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
t.printStackTrace(pw);
buf.append(sw.toString());
pw.close();
}
System.out.println(buf.toString());
} | pw.close(); | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Since StringWriter has no-op close(), I only move pw to try with resource. Thanks for pointing this out! | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
t.printStackTrace(pw);
buf.append(sw.toString());
pw.close();
}
System.out.println(buf.toString());
} | pw.close(); | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Generally, you try to limit the scope of your assertion. How do you know it threw in credential(null) and not buildclient()? This goes for all the other tests that do the same. ```java // arrange final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder(); // Act & Assert assertThrows(NullPointerException.class, () -> builder.credential(null)); ``` | public void nullAADCredential() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).credential(null).buildClient();
});
} | builder.endpoint(getEndpoint()).credential(null).buildClient(); | public void nullAADCredential() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).credential(null);
});
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that the error result is returned when empty text is passed.
*/
@Test
public void detectLanguageEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
DetectLanguageResult result = client.detectLanguage("");
assertNotNull(result.getError());
validateErrorDocument(expectedError, result.getError());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that an error document is returned for a text input with invalid country hint.
* <p>
* TODO: update error Model.
*/
@Test
public void detectLanguageInvalidCountryHint() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid Country Hint.", null, null);
validateErrorDocument(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE).getValue().getError(),
expectedError);
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(400, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeEntities("").getError());
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizePiiEntities("").getError());
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeLinkedEntities("").getError());
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.extractKeyPhrases("").getError());
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.1, 0.5, 0.4, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that an error document is returned for a empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.analyzeSentiment("").getError());
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test client builder with valid subscription key
*/
@Test
public void validKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey())).buildClient();
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test client builder with invalid subscription key
*/
@Test
public void invalidKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsSubscriptionKeyCredential(INVALID_KEY)).buildClient();
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with valid subscription key but update to invalid key and make call to server.
*/
@Test
public void updateToInvalidKey() {
final TextAnalyticsSubscriptionKeyCredential credential =
new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey());
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(INVALID_KEY);
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with invalid subscription key but update to valid key and make call to server.
*/
@Test
public void updateToValidKey() {
final TextAnalyticsSubscriptionKeyCredential credential =
new TextAnalyticsSubscriptionKeyCredential(INVALID_KEY);
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(getSubscriptionKey());
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for missing endpoint
*/
@Test
public void missingEndpoint() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.buildClient();
});
}
/**
* Test for null subscription key
*/
@Test
public void nullSubscriptionKey() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).subscriptionKey(null).buildClient();
});
}
/**
* Test for null AAD credential
*/
@Test
/**
* Test for null service version, which would take take the default service version by default
*/
@Test
public void nullServiceVersion() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey()))
.retryPolicy(new RetryPolicy())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.serviceVersion(null);
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for default pipeline in client builder
*/
@Test
public void defaultPipeline() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey()))
.configuration(Configuration.getGlobalConfiguration())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that the error result is returned when empty text is passed.
*/
@Test
public void detectLanguageEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
DetectLanguageResult result = client.detectLanguage("");
assertNotNull(result.getError());
validateErrorDocument(expectedError, result.getError());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that an error document is returned for a text input with invalid country hint.
* <p>
* TODO: update error Model.
*/
@Test
public void detectLanguageInvalidCountryHint() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid Country Hint.", null, null);
validateErrorDocument(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE).getValue().getError(),
expectedError);
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(400, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeEntities("").getError());
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizePiiEntities("").getError());
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeLinkedEntities("").getError());
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.extractKeyPhrases("").getError());
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.1, 0.5, 0.4, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that an error document is returned for a empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.analyzeSentiment("").getError());
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test client builder with valid subscription key
*/
@Test
public void validKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildClient();
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test client builder with invalid subscription key
*/
@Test
public void invalidKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient();
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with valid subscription key but update to invalid key and make call to server.
*/
@Test
public void updateToInvalidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(getSubscriptionKey());
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(INVALID_KEY);
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with invalid subscription key but update to valid key and make call to server.
*/
@Test
public void updateToValidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(INVALID_KEY);
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(getSubscriptionKey());
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for missing endpoint
*/
@Test
public void missingEndpoint() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.buildClient();
});
}
/**
* Test for null subscription key
*/
@Test
public void nullSubscriptionKey() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).subscriptionKey(null);
});
}
/**
* Test for null AAD credential
*/
@Test
/**
* Test for null service version, which would take take the default service version by default
*/
@Test
public void nullServiceVersion() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.retryPolicy(new RetryPolicy())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.serviceVersion(null);
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for default pipeline in client builder
*/
@Test
public void defaultPipeline() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.configuration(Configuration.getGlobalConfiguration())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
} |
If we log an error that doesn't have any vararg arguments this will throw an exception from what I understand. | public void error(final String msg) {
throw new UnsupportedOperationException();
} | throw new UnsupportedOperationException(); | public void error(final String msg) {
logMessageWithFormat("ERROR", msg);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
``` logger.error(format, args); ``` This is the one we used. | public void error(final String msg) {
throw new UnsupportedOperationException();
} | throw new UnsupportedOperationException(); | public void error(final String msg) {
logMessageWithFormat("ERROR", msg);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
This code is common for all `is*Enabled()` methods. You can extract this out to a method to reduce duplication. | public boolean isDebugEnabled() {
String logLevelStr = Configuration.getGlobalConfiguration().get(AZURE_LOG_LEVEL);
LogLevel currentLogLevel = LogLevel.fromString(logLevelStr);
return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel();
} | return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel(); | public boolean isDebugEnabled() {
return isLogLevelEnabledFromEnv(LogLevel.VERBOSE);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
This null check is still required otherwise it will cause a NPE. | public boolean canLogAtLevel(LogLevel logLevel) {
switch (logLevel) {
case VERBOSE:
return logger.isDebugEnabled();
case INFORMATIONAL:
return logger.isInfoEnabled();
case WARNING:
return logger.isWarnEnabled();
case ERROR:
return logger.isErrorEnabled();
default:
return false;
}
} | switch (logLevel) { | public boolean canLogAtLevel(LogLevel logLevel) {
if (logLevel == null) {
return false;
}
switch (logLevel) {
case VERBOSE:
return logger.isDebugEnabled();
case INFORMATIONAL:
return logger.isInfoEnabled();
case WARNING:
return logger.isWarnEnabled();
case ERROR:
return logger.isErrorEnabled();
default:
return false;
}
} | class name using the {@link LoggerFactory} | class name using the {@link LoggerFactory} |
Should this logic be abstracted into `LoggingUtils` or as a private static method on this class? | public boolean isDebugEnabled() {
String logLevelStr = Configuration.getGlobalConfiguration().get(AZURE_LOG_LEVEL);
LogLevel currentLogLevel = LogLevel.fromString(logLevelStr);
return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel();
} | LogLevel currentLogLevel = LogLevel.fromString(logLevelStr); | public boolean isDebugEnabled() {
return isLogLevelEnabledFromEnv(LogLevel.VERBOSE);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
One of the SDK guidelines it to only print an exception's stack trace when logging is set to verbose. In `ClientLogger` it was set to only log the exception message at a different level. | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | } | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Done. | public boolean canLogAtLevel(LogLevel logLevel) {
switch (logLevel) {
case VERBOSE:
return logger.isDebugEnabled();
case INFORMATIONAL:
return logger.isInfoEnabled();
case WARNING:
return logger.isWarnEnabled();
case ERROR:
return logger.isErrorEnabled();
default:
return false;
}
} | switch (logLevel) { | public boolean canLogAtLevel(LogLevel logLevel) {
if (logLevel == null) {
return false;
}
switch (logLevel) {
case VERBOSE:
return logger.isDebugEnabled();
case INFORMATIONAL:
return logger.isInfoEnabled();
case WARNING:
return logger.isWarnEnabled();
case ERROR:
return logger.isErrorEnabled();
default:
return false;
}
} | class name using the {@link LoggerFactory} | class name using the {@link LoggerFactory} |
Done. | public boolean isDebugEnabled() {
String logLevelStr = Configuration.getGlobalConfiguration().get(AZURE_LOG_LEVEL);
LogLevel currentLogLevel = LogLevel.fromString(logLevelStr);
return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel();
} | return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel(); | public boolean isDebugEnabled() {
return isLogLevelEnabledFromEnv(LogLevel.VERBOSE);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Have a private helper method in class. Env log level is only needed for default logger, so it is ok to leave it here for current use. | public boolean isDebugEnabled() {
String logLevelStr = Configuration.getGlobalConfiguration().get(AZURE_LOG_LEVEL);
LogLevel currentLogLevel = LogLevel.fromString(logLevelStr);
return LogLevel.VERBOSE.getLogLevel() >= currentLogLevel.getLogLevel();
} | LogLevel currentLogLevel = LogLevel.fromString(logLevelStr); | public boolean isDebugEnabled() {
return isLogLevelEnabledFromEnv(LogLevel.VERBOSE);
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Talked offline. | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | } | void writeWithThrowable(StringBuilder buf, Throwable t) {
if (t != null) {
StringWriter sw = new StringWriter();
try (PrintWriter pw = new PrintWriter(sw)) {
t.printStackTrace(pw);
buf.append(sw.toString());
}
}
System.out.println(buf.toString());
} | class name.
*
* @param className Class name creating the logger.
* @throws RuntimeException it is an error.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} | class name passes in.
*/
public DefaultLogger(String className) {
try {
this.classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
this.classPath = className;
}
} |
Make sense. I will update it. | public void nullAADCredential() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).credential(null).buildClient();
});
} | builder.endpoint(getEndpoint()).credential(null).buildClient(); | public void nullAADCredential() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).credential(null);
});
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that the error result is returned when empty text is passed.
*/
@Test
public void detectLanguageEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
DetectLanguageResult result = client.detectLanguage("");
assertNotNull(result.getError());
validateErrorDocument(expectedError, result.getError());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that an error document is returned for a text input with invalid country hint.
* <p>
* TODO: update error Model.
*/
@Test
public void detectLanguageInvalidCountryHint() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid Country Hint.", null, null);
validateErrorDocument(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE).getValue().getError(),
expectedError);
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(400, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeEntities("").getError());
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizePiiEntities("").getError());
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeLinkedEntities("").getError());
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.extractKeyPhrases("").getError());
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.1, 0.5, 0.4, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that an error document is returned for a empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.analyzeSentiment("").getError());
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test client builder with valid subscription key
*/
@Test
public void validKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey())).buildClient();
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test client builder with invalid subscription key
*/
@Test
public void invalidKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsSubscriptionKeyCredential(INVALID_KEY)).buildClient();
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with valid subscription key but update to invalid key and make call to server.
*/
@Test
public void updateToInvalidKey() {
final TextAnalyticsSubscriptionKeyCredential credential =
new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey());
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(INVALID_KEY);
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with invalid subscription key but update to valid key and make call to server.
*/
@Test
public void updateToValidKey() {
final TextAnalyticsSubscriptionKeyCredential credential =
new TextAnalyticsSubscriptionKeyCredential(INVALID_KEY);
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(getSubscriptionKey());
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for missing endpoint
*/
@Test
public void missingEndpoint() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.buildClient();
});
}
/**
* Test for null subscription key
*/
@Test
public void nullSubscriptionKey() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).subscriptionKey(null).buildClient();
});
}
/**
* Test for null AAD credential
*/
@Test
/**
* Test for null service version, which would take take the default service version by default
*/
@Test
public void nullServiceVersion() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey()))
.retryPolicy(new RetryPolicy())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.serviceVersion(null);
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for default pipeline in client builder
*/
@Test
public void defaultPipeline() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsSubscriptionKeyCredential(getSubscriptionKey()))
.configuration(Configuration.getGlobalConfiguration())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that the error result is returned when empty text is passed.
*/
@Test
public void detectLanguageEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
DetectLanguageResult result = client.detectLanguage("");
assertNotNull(result.getError());
validateErrorDocument(expectedError, result.getError());
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that an error document is returned for a text input with invalid country hint.
* <p>
* TODO: update error Model.
*/
@Test
public void detectLanguageInvalidCountryHint() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid Country Hint.", null, null);
validateErrorDocument(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE).getValue().getError(),
expectedError);
}
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(400, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeEntities("").getError());
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizePiiEntities("").getError());
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.recognizeLinkedEntities("").getError());
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.extractKeyPhrases("").getError());
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.1, 0.5, 0.4, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that an error document is returned for a empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
TextAnalyticsError expectedError = new TextAnalyticsError(ErrorCodeValue.INVALID_ARGUMENT, "Invalid document in request.", null, null);
validateErrorDocument(expectedError, client.analyzeSentiment("").getError());
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test client builder with valid subscription key
*/
@Test
public void validKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildClient();
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test client builder with invalid subscription key
*/
@Test
public void invalidKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient();
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with valid subscription key but update to invalid key and make call to server.
*/
@Test
public void updateToInvalidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(getSubscriptionKey());
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(INVALID_KEY);
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with invalid subscription key but update to valid key and make call to server.
*/
@Test
public void updateToValidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(INVALID_KEY);
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(getSubscriptionKey());
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for missing endpoint
*/
@Test
public void missingEndpoint() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.buildClient();
});
}
/**
* Test for null subscription key
*/
@Test
public void nullSubscriptionKey() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).subscriptionKey(null);
});
}
/**
* Test for null AAD credential
*/
@Test
/**
* Test for null service version, which would take take the default service version by default
*/
@Test
public void nullServiceVersion() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.retryPolicy(new RetryPolicy())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.serviceVersion(null);
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for default pipeline in client builder
*/
@Test
public void defaultPipeline() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.configuration(Configuration.getGlobalConfiguration())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
} |
Seems like we should introduce a method something like envContains(params string var) that returns true if the env contains all of the 'var's. | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromSupplier(() -> {
if (configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_ID)
&& configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_SECRET)
&& configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID)) {
return new ClientSecretCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET),
identityClientOptions);
} else if (configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_ID)
&& configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID)
&& configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH)) {
return new ClientCertificateCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH),
null,
identityClientOptions);
} else if (configuration.contains(Configuration.PROPERTY_AZURE_USERNAME)
&& configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_ID)
&& configuration.contains(Configuration.PROPERTY_AZURE_PASSWORD)) {
return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_USERNAME),
configuration.get(Configuration.PROPERTY_AZURE_PASSWORD),
identityClientOptions);
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | } else if (configuration.contains(Configuration.PROPERTY_AZURE_CLIENT_ID) | public Mono<AccessToken> getToken(TokenRequestContext request) {
String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET);
String certPath = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH);
String username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME);
String password = configuration.get(Configuration.PROPERTY_AZURE_PASSWORD);
return Mono.fromSupplier(() -> {
if (verifyNotNull(clientId)) {
if (verifyNotNull(tenantId, clientSecret)) {
return new ClientSecretCredential(tenantId, clientId, clientSecret, identityClientOptions);
} else if (verifyNotNull(tenantId, certPath)) {
return new ClientCertificateCredential(tenantId, clientId, certPath, null, identityClientOptions);
} else if (verifyNotNull(username, password)) {
return new UsernamePasswordCredential(clientId, tenantId, username, password, identityClientOptions);
}
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
private boolean verifyNotNull(String... configs){
for(String config: configs){
if(config == null){
return false;
}
}
return true;
}
} |
Is this not explicitly needed by `UsernamePasswordCredential`? Otherwise the validation check is missing this. | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromSupplier(() -> {
if (configuration.containsAll(Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_CLIENT_SECRET)) {
return new ClientSecretCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET),
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH)) {
return new ClientCertificateCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH),
null,
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_USERNAME,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_PASSWORD)) {
return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_USERNAME),
configuration.get(Configuration.PROPERTY_AZURE_PASSWORD),
identityClientOptions);
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID), | public Mono<AccessToken> getToken(TokenRequestContext request) {
String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET);
String certPath = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH);
String username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME);
String password = configuration.get(Configuration.PROPERTY_AZURE_PASSWORD);
return Mono.fromSupplier(() -> {
if (verifyNotNull(clientId)) {
if (verifyNotNull(tenantId, clientSecret)) {
return new ClientSecretCredential(tenantId, clientId, clientSecret, identityClientOptions);
} else if (verifyNotNull(tenantId, certPath)) {
return new ClientCertificateCredential(tenantId, clientId, certPath, null, identityClientOptions);
} else if (verifyNotNull(username, password)) {
return new UsernamePasswordCredential(clientId, tenantId, username, password, identityClientOptions);
}
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
private boolean verifyNotNull(String... configs){
for(String config: configs){
if(config == null){
return false;
}
}
return true;
}
} |
Thoughts on changing the `containsAll` method to `getAll`? There are already some race condition chances going on with validating existence then retrieval, if we retrieval all into local instances we could reduce the number of timing issues happening. | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromSupplier(() -> {
if (configuration.containsAll(Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_CLIENT_SECRET)) {
return new ClientSecretCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET),
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH)) {
return new ClientCertificateCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH),
null,
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_USERNAME,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_PASSWORD)) {
return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_USERNAME),
configuration.get(Configuration.PROPERTY_AZURE_PASSWORD),
identityClientOptions);
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | if (configuration.containsAll(Configuration.PROPERTY_AZURE_TENANT_ID, | public Mono<AccessToken> getToken(TokenRequestContext request) {
String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET);
String certPath = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH);
String username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME);
String password = configuration.get(Configuration.PROPERTY_AZURE_PASSWORD);
return Mono.fromSupplier(() -> {
if (verifyNotNull(clientId)) {
if (verifyNotNull(tenantId, clientSecret)) {
return new ClientSecretCredential(tenantId, clientId, clientSecret, identityClientOptions);
} else if (verifyNotNull(tenantId, certPath)) {
return new ClientCertificateCredential(tenantId, clientId, certPath, null, identityClientOptions);
} else if (verifyNotNull(username, password)) {
return new UsernamePasswordCredential(clientId, tenantId, username, password, identityClientOptions);
}
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
private boolean verifyNotNull(String... configs){
for(String config: configs){
if(config == null){
return false;
}
}
return true;
}
} |
Could we retrieve the commonly needed configuration and check those first? Seems like we could make an initial check that `PROPERTY_AZURE_CLIENT_ID` is set and if not just early out. | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromSupplier(() -> {
if (configuration.containsAll(Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_CLIENT_SECRET)) {
return new ClientSecretCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET),
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH)) {
return new ClientCertificateCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH),
null,
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_USERNAME,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_PASSWORD)) {
return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_USERNAME),
configuration.get(Configuration.PROPERTY_AZURE_PASSWORD),
identityClientOptions);
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID), | public Mono<AccessToken> getToken(TokenRequestContext request) {
String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET);
String certPath = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH);
String username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME);
String password = configuration.get(Configuration.PROPERTY_AZURE_PASSWORD);
return Mono.fromSupplier(() -> {
if (verifyNotNull(clientId)) {
if (verifyNotNull(tenantId, clientSecret)) {
return new ClientSecretCredential(tenantId, clientId, clientSecret, identityClientOptions);
} else if (verifyNotNull(tenantId, certPath)) {
return new ClientCertificateCredential(tenantId, clientId, certPath, null, identityClientOptions);
} else if (verifyNotNull(username, password)) {
return new UsernamePasswordCredential(clientId, tenantId, username, password, identityClientOptions);
}
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
private boolean verifyNotNull(String... configs){
for(String config: configs){
if(config == null){
return false;
}
}
return true;
}
} |
Could remove the local instance since `Configuration` uses the fluent pattern for `put`. | public void testCreateEnvironmentClientCertificateCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | Configuration configuration = Configuration.getGlobalConfiguration(); | public void testCreateEnvironmentClientCertificateCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
@Test
public void testCreateEnvironmentUserPasswordCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_USERNAME, "bar");
configuration.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
@Test
public void testCreateEnvironmentUserPasswordCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
} |
Same as the previous comment | public void testCreateEnvironmentUserPasswordCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_USERNAME, "bar");
configuration.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | Configuration configuration = Configuration.getGlobalConfiguration(); | public void testCreateEnvironmentUserPasswordCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
public void testCreateEnvironmentClientCertificateCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
public void testCreateEnvironmentClientCertificateCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
} |
NIT: I think this comment should say "ClientCertificateCredential" instead of "ClientSecretCredential" | public void testCreateEnvironmentClientCertificateCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | public void testCreateEnvironmentClientCertificateCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
@Test
public void testCreateEnvironmentUserPasswordCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_USERNAME, "bar");
configuration.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
@Test
public void testCreateEnvironmentUserPasswordCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
} | |
NIT: Similar feedback as above, I think this should be UsernamePasswordCredential in the comment. | public void testCreateEnvironmentUserPasswordCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_USERNAME, "bar");
configuration.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | public void testCreateEnvironmentUserPasswordCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
public void testCreateEnvironmentClientCertificateCredential() {
Configuration configuration = Configuration.getGlobalConfiguration();
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo");
configuration.put(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH, "bar");
configuration.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
} | class EnvironmentCredentialTests {
@Test
public void testCreateEnvironmentClientSecretCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
public void testCreateEnvironmentClientCertificateCredential() {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_USERNAME, "bar")
.put(Configuration.PROPERTY_AZURE_PASSWORD, "baz");
EnvironmentCredential credential = new EnvironmentCredentialBuilder().build();
StepVerifier.create(credential.getToken(new TokenRequestContext().addScopes("qux/.default"))
.doOnSuccess(s -> fail())
.onErrorResume(t -> {
String message = t.getMessage();
Assert.assertFalse(message != null && message.contains("Cannot create any credentials with the current environment variables"));
return Mono.just(new AccessToken("token", OffsetDateTime.MAX));
}))
.expectNextMatches(token -> "token".equals(token.getToken()))
.verifyComplete();
}
@Test
} | |
No it's not, but supply it anyway (`null` if not set, or a tenant id if set). | public Mono<AccessToken> getToken(TokenRequestContext request) {
return Mono.fromSupplier(() -> {
if (configuration.containsAll(Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_CLIENT_SECRET)) {
return new ClientSecretCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET),
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_TENANT_ID,
Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH)) {
return new ClientCertificateCredential(configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH),
null,
identityClientOptions);
} else if (configuration.containsAll(Configuration.PROPERTY_AZURE_USERNAME,
Configuration.PROPERTY_AZURE_CLIENT_ID,
Configuration.PROPERTY_AZURE_PASSWORD)) {
return new UsernamePasswordCredential(configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID),
configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID),
configuration.get(Configuration.PROPERTY_AZURE_USERNAME),
configuration.get(Configuration.PROPERTY_AZURE_PASSWORD),
identityClientOptions);
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID), | public Mono<AccessToken> getToken(TokenRequestContext request) {
String clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
String tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
String clientSecret = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_SECRET);
String certPath = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_CERTIFICATE_PATH);
String username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME);
String password = configuration.get(Configuration.PROPERTY_AZURE_PASSWORD);
return Mono.fromSupplier(() -> {
if (verifyNotNull(clientId)) {
if (verifyNotNull(tenantId, clientSecret)) {
return new ClientSecretCredential(tenantId, clientId, clientSecret, identityClientOptions);
} else if (verifyNotNull(tenantId, certPath)) {
return new ClientCertificateCredential(tenantId, clientId, certPath, null, identityClientOptions);
} else if (verifyNotNull(username, password)) {
return new UsernamePasswordCredential(clientId, tenantId, username, password, identityClientOptions);
}
}
throw logger.logExceptionAsError(new ClientAuthenticationException(
"Cannot create any credentials with the current environment variables",
null));
}).flatMap(cred -> cred.getToken(request));
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
} | class EnvironmentCredential implements TokenCredential {
private final Configuration configuration;
private final IdentityClientOptions identityClientOptions;
private final ClientLogger logger = new ClientLogger(EnvironmentCredential.class);
/**
* Creates an instance of the default environment credential provider.
*
* @param identityClientOptions the options for configuring the identity client
*/
EnvironmentCredential(IdentityClientOptions identityClientOptions) {
this.configuration = Configuration.getGlobalConfiguration().clone();
this.identityClientOptions = identityClientOptions;
}
@Override
private boolean verifyNotNull(String... configs){
for(String config: configs){
if(config == null){
return false;
}
}
return true;
}
} |
Will error level enabled check come later? | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | logger.error("Error occurred in ReadMyWriteWorkflow", throwable); | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Flux.just(0L).delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread")).flatMap(aVoid -> {
return housekeepingClient.queryOffers(
String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
collection.getResourceId())
, null).flatMap(page -> Flux.fromIterable(page.getResults()))
.take(1).flatMap(offer -> {
logger.info("going to scale up collection, newThroughput {}", newThroughput);
offer.setThroughput(newThroughput);
return housekeepingClient.replaceOffer(offer);
});
}).doOnTerminate(housekeepingClient::close)
.subscribe(aVoid -> {
}, e -> {
logger.error("collectionScaleUpFailed to scale up collection", e);
collectionScaleUpFailed.set(true);
},
() -> {
logger.info("Collection Scale up request sent to the service");
}
);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Flux.just(0L).delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread")).flatMap(aVoid -> {
return housekeepingClient.queryOffers(
String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
collection.getResourceId())
, null).flatMap(page -> Flux.fromIterable(page.getResults()))
.take(1).flatMap(offer -> {
logger.info("going to scale up collection, newThroughput {}", newThroughput);
offer.setThroughput(newThroughput);
return housekeepingClient.replaceOffer(offer);
});
}).doOnTerminate(housekeepingClient::close)
.subscribe(aVoid -> {
}, e -> {
logger.error("collectionScaleUpFailed to scale up collection", e);
collectionScaleUpFailed.set(true);
},
() -> {
logger.info("Collection Scale up request sent to the service");
}
);
}
} |
Since this is logging error in tests, I don't think if we need error level enabled check for this. This is only for debugging CI | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | logger.error("Error occurred in ReadMyWriteWorkflow", throwable); | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Flux.just(0L).delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread")).flatMap(aVoid -> {
return housekeepingClient.queryOffers(
String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
collection.getResourceId())
, null).flatMap(page -> Flux.fromIterable(page.getResults()))
.take(1).flatMap(offer -> {
logger.info("going to scale up collection, newThroughput {}", newThroughput);
offer.setThroughput(newThroughput);
return housekeepingClient.replaceOffer(offer);
});
}).doOnTerminate(housekeepingClient::close)
.subscribe(aVoid -> {
}, e -> {
logger.error("collectionScaleUpFailed to scale up collection", e);
collectionScaleUpFailed.set(true);
},
() -> {
logger.info("Collection Scale up request sent to the service");
}
);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Flux.just(0L).delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread")).flatMap(aVoid -> {
return housekeepingClient.queryOffers(
String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
collection.getResourceId())
, null).flatMap(page -> Flux.fromIterable(page.getResults()))
.take(1).flatMap(offer -> {
logger.info("going to scale up collection, newThroughput {}", newThroughput);
offer.setThroughput(newThroughput);
return housekeepingClient.replaceOffer(offer);
});
}).doOnTerminate(housekeepingClient::close)
.subscribe(aVoid -> {
}, e -> {
logger.error("collectionScaleUpFailed to scale up collection", e);
collectionScaleUpFailed.set(true);
},
() -> {
logger.info("Collection Scale up request sent to the service");
}
);
}
} |
slf4j internally will check the log level, so checking log level is not needed in this case. checking log level is needed only when the information needed to be logged requires some pre-computation. For example in the following case checking log level helps: ```java // checking log level helps if (logger.isDebugEnabled()) { String diagnosticsInfoAsString = diagnosticsInfo.toJson() logger.debug("diagnostics info is {}, diagnosticsInfoAsString); } ``` but here it is not needed ```java // checking log level is not needed as no pre-computation is done here. // slf4 will do internal checking. logger.debug("endpoint is {}, endpoint); ``` | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | logger.error("Error occurred in ReadMyWriteWorkflow", throwable); | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Flux.just(0L).delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread")).flatMap(aVoid -> {
return housekeepingClient.queryOffers(
String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
collection.getResourceId())
, null).flatMap(page -> Flux.fromIterable(page.getResults()))
.take(1).flatMap(offer -> {
logger.info("going to scale up collection, newThroughput {}", newThroughput);
offer.setThroughput(newThroughput);
return housekeepingClient.replaceOffer(offer);
});
}).doOnTerminate(housekeepingClient::close)
.subscribe(aVoid -> {
}, e -> {
logger.error("collectionScaleUpFailed to scale up collection", e);
collectionScaleUpFailed.set(true);
},
() -> {
logger.info("Collection Scale up request sent to the service");
}
);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
ArrayList<String> paths = new ArrayList<>();
paths.add("/mypk");
partitionKeyDef.setPaths(paths);
IndexingPolicy indexingPolicy = new IndexingPolicy();
List<IncludedPath> includedPaths = new ArrayList<>();
IncludedPath includedPath = new IncludedPath();
includedPath.setPath("/*");
Collection<Index> indexes = new ArrayList<>();
Index stringIndex = Index.range(DataType.STRING);
BridgeInternal.setProperty(stringIndex, "precision", -1);
indexes.add(stringIndex);
Index numberIndex = Index.range(DataType.NUMBER);
BridgeInternal.setProperty(numberIndex, "getPrecision", -1);
indexes.add(numberIndex);
includedPath.setIndexes(indexes);
includedPaths.add(includedPath);
indexingPolicy.setIncludedPaths(includedPaths);
DocumentCollection collectionDefinition = new DocumentCollection();
collectionDefinition.setIndexingPolicy(indexingPolicy);
collectionDefinition.setId(UUID.randomUUID().toString());
collectionDefinition.setPartitionKey(partitionKeyDef);
return collectionDefinition;
}
/**
 * Schedules a single, delayed scale-up of the test collection's offer throughput on a
 * dedicated scheduler thread. On failure the {@code collectionScaleUpFailed} flag is set
 * so the running test can assert on it afterwards.
 *
 * @param delayStartInSeconds delay before the scale-up is initiated.
 * @param newThroughput the offer throughput (RU/s) to request.
 */
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
    AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
    Flux.just(0L)
        .delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread"))
        .flatMap(aVoid -> {
            // Find the offer backing this collection and replace it with the new throughput.
            return housekeepingClient.queryOffers(
                String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
                    collection.getResourceId()),
                null)
                .flatMap(page -> Flux.fromIterable(page.getResults()))
                .take(1)
                .flatMap(offer -> {
                    logger.info("going to scale up collection, newThroughput {}", newThroughput);
                    offer.setThroughput(newThroughput);
                    return housekeepingClient.replaceOffer(offer);
                });
        })
        .doOnTerminate(housekeepingClient::close)
        .subscribe(
            aVoid -> {
            },
            e -> {
                // Fix: the message previously read "collectionScaleUpFailed to scale up
                // collection" — the flag name had been pasted into the log text.
                logger.error("Failed to scale up collection", e);
                collectionScaleUpFailed.set(true);
            },
            () -> logger.info("Collection Scale up request sent to the service"));
}
} |
Agreed, thanks for the reference @moderakh | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | logger.error("Error occurred in ReadMyWriteWorkflow", throwable); | public void readMyWrites(boolean useNameLink) throws Exception {
int concurrency = 5;
String cmdFormat = "-serviceEndpoint %s -masterKey %s" +
" -databaseId %s" +
" -collectionId %s" +
" -consistencyLevel %s" +
" -concurrency %s" +
" -numberOfOperations %s" +
" -maxRunningTimeDuration %s" +
" -operation ReadMyWrites" +
" -connectionMode Direct" +
" -numberOfPreCreatedDocuments 100" +
" -printingInterval 60" +
"%s";
String cmd = Strings.lenientFormat(cmdFormat,
TestConfigurations.HOST,
TestConfigurations.MASTER_KEY,
database.getId(),
collection.getId(),
CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, desiredConsistency),
concurrency,
numberOfOperationsAsString,
maxRunningTime,
(useNameLink ? " -useNameLink" : ""));
Configuration cfg = new Configuration();
new JCommander(cfg, StringUtils.split(cmd));
AtomicInteger success = new AtomicInteger();
AtomicInteger error = new AtomicInteger();
ReadMyWriteWorkflow wf = new ReadMyWriteWorkflow(cfg) {
@Override
protected void onError(Throwable throwable) {
logger.error("Error occurred in ReadMyWriteWorkflow", throwable);
error.incrementAndGet();
}
@Override
protected void onSuccess() {
success.incrementAndGet();
}
};
scheduleScaleUp(delayForInitiationCollectionScaleUpInSeconds, newCollectionThroughput);
wf.run();
wf.shutdown();
int numberOfOperations = Integer.parseInt(numberOfOperationsAsString);
assertThat(error).hasValue(0);
assertThat(collectionScaleUpFailed).isFalse();
if (numberOfOperations > 0) {
assertThat(success).hasValue(numberOfOperations);
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
/**
 * Builds a collection definition partitioned on {@code /mypk} whose indexing policy
 * includes all paths ({@code /*}) with maximum-precision (-1) RANGE indexes for both
 * STRING and NUMBER data types.
 *
 * @return a new {@link DocumentCollection} definition with a random id.
 */
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
    PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
    ArrayList<String> paths = new ArrayList<>();
    paths.add("/mypk");
    partitionKeyDef.setPaths(paths);

    IndexingPolicy indexingPolicy = new IndexingPolicy();
    List<IncludedPath> includedPaths = new ArrayList<>();
    IncludedPath includedPath = new IncludedPath();
    includedPath.setPath("/*");
    Collection<Index> indexes = new ArrayList<>();
    Index stringIndex = Index.range(DataType.STRING);
    BridgeInternal.setProperty(stringIndex, "precision", -1);
    indexes.add(stringIndex);
    Index numberIndex = Index.range(DataType.NUMBER);
    // Fix: the property name was "getPrecision", which does not match the "precision"
    // property used for the string index above, so the number index silently kept its
    // default precision instead of maximum precision (-1).
    BridgeInternal.setProperty(numberIndex, "precision", -1);
    indexes.add(numberIndex);
    includedPath.setIndexes(indexes);
    includedPaths.add(includedPath);
    indexingPolicy.setIncludedPaths(includedPaths);

    DocumentCollection collectionDefinition = new DocumentCollection();
    collectionDefinition.setIndexingPolicy(indexingPolicy);
    collectionDefinition.setId(UUID.randomUUID().toString());
    collectionDefinition.setPartitionKey(partitionKeyDef);
    return collectionDefinition;
}
/**
 * Schedules a single, delayed scale-up of the test collection's offer throughput on a
 * dedicated scheduler thread. On failure the {@code collectionScaleUpFailed} flag is set
 * so the running test can assert on it afterwards.
 *
 * @param delayStartInSeconds delay before the scale-up is initiated.
 * @param newThroughput the offer throughput (RU/s) to request.
 */
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
    AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
    Flux.just(0L)
        .delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread"))
        .flatMap(aVoid -> {
            // Find the offer backing this collection and replace it with the new throughput.
            return housekeepingClient.queryOffers(
                String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
                    collection.getResourceId()),
                null)
                .flatMap(page -> Flux.fromIterable(page.getResults()))
                .take(1)
                .flatMap(offer -> {
                    logger.info("going to scale up collection, newThroughput {}", newThroughput);
                    offer.setThroughput(newThroughput);
                    return housekeepingClient.replaceOffer(offer);
                });
        })
        .doOnTerminate(housekeepingClient::close)
        .subscribe(
            aVoid -> {
            },
            e -> {
                // Fix: the message previously read "collectionScaleUpFailed to scale up
                // collection" — the flag name had been pasted into the log text.
                logger.error("Failed to scale up collection", e);
                collectionScaleUpFailed.set(true);
            },
            () -> logger.info("Collection Scale up request sent to the service"));
}
} | class ReadMyWritesConsistencyTest {
private final static Logger logger = LoggerFactory.getLogger(ReadMyWritesConsistencyTest.class);
private final AtomicBoolean collectionScaleUpFailed = new AtomicBoolean(false);
private final Duration defaultMaxRunningTime = Duration.ofMinutes(45);
private final int delayForInitiationCollectionScaleUpInSeconds = 60;
private final String desiredConsistency =
System.getProperty("DESIRED_CONSISTENCY",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("DESIRED_CONSISTENCY")), "Session"));
private final int initialCollectionThroughput = 10_000;
private final String maxRunningTime =
System.getProperty("MAX_RUNNING_TIME", StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("MAX_RUNNING_TIME")), defaultMaxRunningTime.toString()));
private final int newCollectionThroughput = 100_000;
private final String numberOfOperationsAsString =
System.getProperty("NUMBER_OF_OPERATIONS",
StringUtils.defaultString(Strings.emptyToNull(
System.getenv().get("NUMBER_OF_OPERATIONS")), "-1"));
private DocumentCollection collection;
private Database database;
@AfterClass(groups = "e2e")
public void afterClass() {
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
Utils.safeCleanDatabases(housekeepingClient);
Utils.safeClean(housekeepingClient, database);
Utils.safeClose(housekeepingClient);
}
@BeforeClass(groups = "e2e")
public void before_ReadMyWritesConsistencyTest() {
RequestOptions options = new RequestOptions();
options.setOfferThroughput(initialCollectionThroughput);
AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
database = Utils.createDatabaseForTest(housekeepingClient);
collection = housekeepingClient.createCollection("dbs/" + database.getId(),
getCollectionDefinitionWithRangeRangeIndex(),
options).single().block().getResource();
housekeepingClient.close();
}
@DataProvider(name = "collectionLinkTypeArgProvider")
public Object[][] collectionLinkTypeArgProvider() {
return new Object[][] {
{ true },
};
}
@Test(dataProvider = "collectionLinkTypeArgProvider", groups = "e2e")
/**
 * Builds a collection definition partitioned on {@code /mypk} whose indexing policy
 * includes all paths ({@code /*}) with maximum-precision (-1) RANGE indexes for both
 * STRING and NUMBER data types.
 *
 * @return a new {@link DocumentCollection} definition with a random id.
 */
DocumentCollection getCollectionDefinitionWithRangeRangeIndex() {
    PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition();
    ArrayList<String> paths = new ArrayList<>();
    paths.add("/mypk");
    partitionKeyDef.setPaths(paths);

    IndexingPolicy indexingPolicy = new IndexingPolicy();
    List<IncludedPath> includedPaths = new ArrayList<>();
    IncludedPath includedPath = new IncludedPath();
    includedPath.setPath("/*");
    Collection<Index> indexes = new ArrayList<>();
    Index stringIndex = Index.range(DataType.STRING);
    BridgeInternal.setProperty(stringIndex, "precision", -1);
    indexes.add(stringIndex);
    Index numberIndex = Index.range(DataType.NUMBER);
    // Fix: the property name was "getPrecision", which does not match the "precision"
    // property used for the string index above, so the number index silently kept its
    // default precision instead of maximum precision (-1).
    BridgeInternal.setProperty(numberIndex, "precision", -1);
    indexes.add(numberIndex);
    includedPath.setIndexes(indexes);
    includedPaths.add(includedPath);
    indexingPolicy.setIncludedPaths(includedPaths);

    DocumentCollection collectionDefinition = new DocumentCollection();
    collectionDefinition.setIndexingPolicy(indexingPolicy);
    collectionDefinition.setId(UUID.randomUUID().toString());
    collectionDefinition.setPartitionKey(partitionKeyDef);
    return collectionDefinition;
}
/**
 * Schedules a single, delayed scale-up of the test collection's offer throughput on a
 * dedicated scheduler thread. On failure the {@code collectionScaleUpFailed} flag is set
 * so the running test can assert on it afterwards.
 *
 * @param delayStartInSeconds delay before the scale-up is initiated.
 * @param newThroughput the offer throughput (RU/s) to request.
 */
private void scheduleScaleUp(int delayStartInSeconds, int newThroughput) {
    AsyncDocumentClient housekeepingClient = Utils.housekeepingClient();
    Flux.just(0L)
        .delayElements(Duration.ofSeconds(delayStartInSeconds), Schedulers.newSingle("ScaleUpThread"))
        .flatMap(aVoid -> {
            // Find the offer backing this collection and replace it with the new throughput.
            return housekeepingClient.queryOffers(
                String.format("SELECT * FROM r WHERE r.offerResourceId = '%s'",
                    collection.getResourceId()),
                null)
                .flatMap(page -> Flux.fromIterable(page.getResults()))
                .take(1)
                .flatMap(offer -> {
                    logger.info("going to scale up collection, newThroughput {}", newThroughput);
                    offer.setThroughput(newThroughput);
                    return housekeepingClient.replaceOffer(offer);
                });
        })
        .doOnTerminate(housekeepingClient::close)
        .subscribe(
            aVoid -> {
            },
            e -> {
                // Fix: the message previously read "collectionScaleUpFailed to scale up
                // collection" — the flag name had been pasted into the log text.
                logger.error("Failed to scale up collection", e);
                collectionScaleUpFailed.set(true);
            },
            () -> logger.info("Collection Scale up request sent to the service"));
}
} |
Can we use `Locale.ROOT` instead or does it have to be in US locale? | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US);
DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US);
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf_windows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US); | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX");
DateTimeFormatter dtfWindows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX");
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtfWindows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
private MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
// "expires_on" may arrive either as raw epoch seconds or as a formatted date-time
// string; parseDateToEpochSeconds handles both and the resulting seconds offset is
// added to EPOCH (1970-01-01T00:00:00Z) to produce the token's expiry instant.
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
public MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} |
```suggestion DateTimeFormatter dtfWindows= DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US); ``` | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US);
DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US);
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf_windows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US); | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX");
DateTimeFormatter dtfWindows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX");
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtfWindows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
private MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
public MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} |
I was thinking the service would always send US-formatted datetimes, but I'm not sure. | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US);
DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US);
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf_windows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US); | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX");
DateTimeFormatter dtfWindows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX");
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtfWindows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
private MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
public MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} |
Changed back to the default, which was working before. | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US);
DateTimeFormatter dtf_windows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX", Locale.US);
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf_windows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX", Locale.US); | private static Long parseDateToEpochSeconds(String dateTime) {
ClientLogger logger = new ClientLogger(MSIToken.class);
DateTimeFormatter dtf = DateTimeFormatter.ofPattern("M/d/yyyy H:mm:ss XXX");
DateTimeFormatter dtfWindows = DateTimeFormatter.ofPattern("M/d/yyyy K:mm:ss a XXX");
try {
return Long.parseLong(dateTime);
} catch (NumberFormatException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtf.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
try {
return Instant.from(dtfWindows.parse(dateTime)).getEpochSecond();
} catch (DateTimeParseException e) {
logger.error(e.getMessage());
}
throw logger.logExceptionAsError(new IllegalArgumentException("Unable to parse date time " + dateTime));
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
private MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} | class MSIToken extends AccessToken {
private static final OffsetDateTime EPOCH = OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@JsonProperty(value = "token_type")
private String tokenType;
@JsonProperty(value = "access_token")
private String accessToken;
@JsonProperty(value = "expires_on")
private String expiresOn;
/**
* Creates an access token instance.
*
* @param token the token string.
* @param expiresOn the expiration time.
*/
@JsonCreator
public MSIToken(
@JsonProperty(value = "access_token") String token,
@JsonProperty(value = "expires_on") String expiresOn) {
super(token, EPOCH.plusSeconds(parseDateToEpochSeconds(expiresOn)));
this.accessToken = token;
this.expiresOn = expiresOn;
}
@Override
public String getToken() {
return accessToken;
}
} |
for the .block(), should we pass in the "retryOptions.operationTimeout" so it doesn't block forever (or I think default is 5 mins)? | public void updateCheckpoint() {
this.updateCheckpointAsync().block();
} | this.updateCheckpointAsync().block(); | public void updateCheckpoint() {
this.updateCheckpointAsync().block();
} | class EventContext {
private final PartitionContext partitionContext;
private final EventData eventData;
private final CheckpointStore checkpointStore;
private final LastEnqueuedEventProperties lastEnqueuedEventProperties;
/**
* Creates an instance of {@link EventContext}.
*
* @param partitionContext The partition information associated with the received event.
* @param eventData The event received from Event Hub.
* @param checkpointStore The checkpoint store that is used for updating checkpoints.
* @param lastEnqueuedEventProperties The properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* {@code null}.
* @throws NullPointerException If {@code partitionContext}, {@code eventData} or {@code checkpointStore} is null.
*/
public EventContext(PartitionContext partitionContext, EventData eventData,
CheckpointStore checkpointStore, LastEnqueuedEventProperties lastEnqueuedEventProperties) {
this.partitionContext = Objects.requireNonNull(partitionContext, "'partitionContext' cannot be null.");
this.eventData = Objects.requireNonNull(eventData, "'eventData' cannot be null.");
this.checkpointStore = Objects.requireNonNull(checkpointStore, "'checkpointStore' cannot be null.");
this.lastEnqueuedEventProperties = lastEnqueuedEventProperties;
}
/**
* Returns the partition information associated with the received event.
*
* @return The partition information of the received event.
*/
public PartitionContext getPartitionContext() {
return partitionContext;
}
/**
* Returns the event data received from Event Hub.
*
* @return The event data received from Event Hub.
*/
public EventData getEventData() {
return eventData;
}
/**
* Returns the properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* return {@code null}.
*
* @return The properties of the last enqueued event in this partition. If
* {@link EventProcessorClientBuilder
* method will return {@code null}.
*/
public LastEnqueuedEventProperties getLastEnqueuedEventProperties() {
return lastEnqueuedEventProperties;
}
/**
* Updates the checkpoint asynchronously for this partition using the event data in this
* {@link EventContext}. This will serve as the last known successfully processed event in this partition if the
* update is successful.
*
* @return a representation of deferred execution of this call.
*/
public Mono<Void> updateCheckpointAsync() {
    // Capture this event's position (sequence number + offset) within its partition
    // and hand it to the checkpoint store for persistence.
    final Checkpoint partitionCheckpoint = new Checkpoint();
    partitionCheckpoint.setFullyQualifiedNamespace(partitionContext.getFullyQualifiedNamespace());
    partitionCheckpoint.setEventHubName(partitionContext.getEventHubName());
    partitionCheckpoint.setConsumerGroup(partitionContext.getConsumerGroup());
    partitionCheckpoint.setPartitionId(partitionContext.getPartitionId());
    partitionCheckpoint.setSequenceNumber(eventData.getSequenceNumber());
    partitionCheckpoint.setOffset(eventData.getOffset());
    return checkpointStore.updateCheckpoint(partitionCheckpoint);
}
/**
* Updates the checkpoint synchronously for this partition using the event data. This will serve as the last known
* successfully processed event in this partition if the update is successful.
*/
} | class EventContext {
private final PartitionContext partitionContext;
private final EventData eventData;
private final CheckpointStore checkpointStore;
private final LastEnqueuedEventProperties lastEnqueuedEventProperties;
/**
* Creates an instance of {@link EventContext}.
*
* @param partitionContext The partition information associated with the received event.
* @param eventData The event received from Event Hub.
* @param checkpointStore The checkpoint store that is used for updating checkpoints.
* @param lastEnqueuedEventProperties The properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* {@code null}.
* @throws NullPointerException If {@code partitionContext}, {@code eventData} or {@code checkpointStore} is null.
*/
public EventContext(PartitionContext partitionContext, EventData eventData,
CheckpointStore checkpointStore, LastEnqueuedEventProperties lastEnqueuedEventProperties) {
this.partitionContext = Objects.requireNonNull(partitionContext, "'partitionContext' cannot be null.");
this.eventData = Objects.requireNonNull(eventData, "'eventData' cannot be null.");
this.checkpointStore = Objects.requireNonNull(checkpointStore, "'checkpointStore' cannot be null.");
this.lastEnqueuedEventProperties = lastEnqueuedEventProperties;
}
/**
* Returns the partition information associated with the received event.
*
* @return The partition information of the received event.
*/
public PartitionContext getPartitionContext() {
return partitionContext;
}
/**
* Returns the event data received from Event Hub.
*
* @return The event data received from Event Hub.
*/
public EventData getEventData() {
return eventData;
}
/**
* Returns the properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* return {@code null}.
*
* @return The properties of the last enqueued event in this partition. If
* {@link EventProcessorClientBuilder
* method will return {@code null}.
*/
public LastEnqueuedEventProperties getLastEnqueuedEventProperties() {
return lastEnqueuedEventProperties;
}
/**
* Updates the checkpoint asynchronously for this partition using the event data in this
* {@link EventContext}. This will serve as the last known successfully processed event in this partition if the
* update is successful.
*
* @return a representation of deferred execution of this call.
*/
public Mono<Void> updateCheckpointAsync() {
    // Capture this event's position (sequence number + offset) within its partition
    // and hand it to the checkpoint store for persistence.
    final Checkpoint partitionCheckpoint = new Checkpoint();
    partitionCheckpoint.setFullyQualifiedNamespace(partitionContext.getFullyQualifiedNamespace());
    partitionCheckpoint.setEventHubName(partitionContext.getEventHubName());
    partitionCheckpoint.setConsumerGroup(partitionContext.getConsumerGroup());
    partitionCheckpoint.setPartitionId(partitionContext.getPartitionId());
    partitionCheckpoint.setSequenceNumber(eventData.getSequenceNumber());
    partitionCheckpoint.setOffset(eventData.getOffset());
    return checkpointStore.updateCheckpoint(partitionCheckpoint);
}
/**
* Updates the checkpoint synchronously for this partition using the event data. This will serve as the last known
* successfully processed event in this partition if the update is successful.
*/
} |
Since this is just a delegate method to the checkpoint store's updateCheckpoint(), I think the checkpoint store (blob client or any other store) should be configured with the necessary timeouts and retry options. If the store they use is super-slow, they might want a different timeout setting than the one they use for Event Hubs (in retryOptions). Maybe we can defer adding a timeout to a later release if that's required. | public void updateCheckpoint() {
this.updateCheckpointAsync().block();
} | this.updateCheckpointAsync().block(); | public void updateCheckpoint() {
this.updateCheckpointAsync().block();
} | class EventContext {
private final PartitionContext partitionContext;
private final EventData eventData;
private final CheckpointStore checkpointStore;
private final LastEnqueuedEventProperties lastEnqueuedEventProperties;
/**
* Creates an instance of {@link EventContext}.
*
* @param partitionContext The partition information associated with the received event.
* @param eventData The event received from Event Hub.
* @param checkpointStore The checkpoint store that is used for updating checkpoints.
* @param lastEnqueuedEventProperties The properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* {@code null}.
* @throws NullPointerException If {@code partitionContext}, {@code eventData} or {@code checkpointStore} is null.
*/
public EventContext(PartitionContext partitionContext, EventData eventData,
CheckpointStore checkpointStore, LastEnqueuedEventProperties lastEnqueuedEventProperties) {
this.partitionContext = Objects.requireNonNull(partitionContext, "'partitionContext' cannot be null.");
this.eventData = Objects.requireNonNull(eventData, "'eventData' cannot be null.");
this.checkpointStore = Objects.requireNonNull(checkpointStore, "'checkpointStore' cannot be null.");
this.lastEnqueuedEventProperties = lastEnqueuedEventProperties;
}
/**
* Returns the partition information associated with the received event.
*
* @return The partition information of the received event.
*/
public PartitionContext getPartitionContext() {
return partitionContext;
}
/**
* Returns the event data received from Event Hub.
*
* @return The event data received from Event Hub.
*/
public EventData getEventData() {
return eventData;
}
/**
* Returns the properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* return {@code null}.
*
* @return The properties of the last enqueued event in this partition. If
* {@link EventProcessorClientBuilder
* method will return {@code null}.
*/
public LastEnqueuedEventProperties getLastEnqueuedEventProperties() {
return lastEnqueuedEventProperties;
}
/**
* Updates the checkpoint asynchronously for this partition using the event data in this
* {@link EventContext}. This will serve as the last known successfully processed event in this partition if the
* update is successful.
*
* @return a representation of deferred execution of this call.
*/
public Mono<Void> updateCheckpointAsync() {
Checkpoint checkpoint = new Checkpoint()
.setFullyQualifiedNamespace(partitionContext.getFullyQualifiedNamespace())
.setEventHubName(partitionContext.getEventHubName())
.setConsumerGroup(partitionContext.getConsumerGroup())
.setPartitionId(partitionContext.getPartitionId())
.setSequenceNumber(eventData.getSequenceNumber())
.setOffset(eventData.getOffset());
return this.checkpointStore.updateCheckpoint(checkpoint);
}
/**
* Updates the checkpoint synchronously for this partition using the event data. This will serve as the last known
* successfully processed event in this partition if the update is successful.
*/
} | class EventContext {
private final PartitionContext partitionContext;
private final EventData eventData;
private final CheckpointStore checkpointStore;
private final LastEnqueuedEventProperties lastEnqueuedEventProperties;
/**
* Creates an instance of {@link EventContext}.
*
* @param partitionContext The partition information associated with the received event.
* @param eventData The event received from Event Hub.
* @param checkpointStore The checkpoint store that is used for updating checkpoints.
* @param lastEnqueuedEventProperties The properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* {@code null}.
* @throws NullPointerException If {@code partitionContext}, {@code eventData} or {@code checkpointStore} is null.
*/
public EventContext(PartitionContext partitionContext, EventData eventData,
CheckpointStore checkpointStore, LastEnqueuedEventProperties lastEnqueuedEventProperties) {
this.partitionContext = Objects.requireNonNull(partitionContext, "'partitionContext' cannot be null.");
this.eventData = Objects.requireNonNull(eventData, "'eventData' cannot be null.");
this.checkpointStore = Objects.requireNonNull(checkpointStore, "'checkpointStore' cannot be null.");
this.lastEnqueuedEventProperties = lastEnqueuedEventProperties;
}
/**
* Returns the partition information associated with the received event.
*
* @return The partition information of the received event.
*/
public PartitionContext getPartitionContext() {
return partitionContext;
}
/**
* Returns the event data received from Event Hub.
*
* @return The event data received from Event Hub.
*/
public EventData getEventData() {
return eventData;
}
/**
* Returns the properties of the last enqueued event in this partition. If {@link
* EventProcessorClientBuilder
* return {@code null}.
*
* @return The properties of the last enqueued event in this partition. If
* {@link EventProcessorClientBuilder
* method will return {@code null}.
*/
public LastEnqueuedEventProperties getLastEnqueuedEventProperties() {
return lastEnqueuedEventProperties;
}
/**
* Updates the checkpoint asynchronously for this partition using the event data in this
* {@link EventContext}. This will serve as the last known successfully processed event in this partition if the
* update is successful.
*
* @return a representation of deferred execution of this call.
*/
public Mono<Void> updateCheckpointAsync() {
Checkpoint checkpoint = new Checkpoint()
.setFullyQualifiedNamespace(partitionContext.getFullyQualifiedNamespace())
.setEventHubName(partitionContext.getEventHubName())
.setConsumerGroup(partitionContext.getConsumerGroup())
.setPartitionId(partitionContext.getPartitionId())
.setSequenceNumber(eventData.getSequenceNumber())
.setOffset(eventData.getOffset());
return this.checkpointStore.updateCheckpoint(checkpoint);
}
/**
* Updates the checkpoint synchronously for this partition using the event data. This will serve as the last known
* successfully processed event in this partition if the update is successful.
*/
} |
can't we assume that the second param is always partition key? that way the caller doesn't have to explicitly wrap pk value in PartitionKey and implementation will take care of that. thoughts? | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | new PartitionKey(partitionKeyValue), | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
we should pass partition key to CosmosItemRequestOption here. The intention of this benchmark is to measure perf when pk is passed as request options. | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue), new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue), new CosmosItemRequestOptions()); | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
this will result in double serialization (which was fixed prior to this PR) | public T getResource(){
return super.getProperties().toObject(itemClassType);
} | return super.getProperties().toObject(itemClassType); | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
String bodyAsString = response.getBodyAsString();
if (StringUtils.isEmpty(bodyAsString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(bodyAsString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} |
We are following the .net model of making partitionkey object manadatory. https://github.com/Azure/azure-cosmos-dotnet-v3/blob/8f1375a30799b3acf95d843b2db0c2447cbc3876/Microsoft.Azure.Cosmos/src/Resource/Container/Container.cs#L476 | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | new PartitionKey(partitionKeyValue), | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
Changed this to avoid double serialization. Now we directly use the resource string to convert to user object. | public T getResource(){
return super.getProperties().toObject(itemClassType);
} | return super.getProperties().toObject(itemClassType); | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
String bodyAsString = response.getBodyAsString();
if (StringUtils.isEmpty(bodyAsString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(bodyAsString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} |
Added new API accept partition key and changed this call accordingly. | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue), new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue), new CosmosItemRequestOptions()); | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
ok thanks | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | new PartitionKey(partitionKeyValue), | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
int index = (int) (i % docsToRead.size());
PojoizedJson doc = docsToRead.get(index);
String partitionKeyValue = doc.getId();
Mono<CosmosAsyncItemResponse<PojoizedJson>> result = cosmosAsyncContainer.readItem(doc.getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class);
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.ReadThroughput) {
result.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
result.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
Since `responseBodyString` can be empty string, can we please make sure it is handled well when parsing it to an `itemClassType` ? Do we want to throw error in that case, or return null ? | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | return Utils.parse(responseBodyString, itemClassType); | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} |
seems CosmosItemRequestOptions is redundant now. isn't there an overload which doesn't require `CosmosItemRequestOptions` if so we should use that one. | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | new CosmosItemRequestOptions()); | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
Handled the null/empty case. Returning null | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | return Utils.parse(responseBodyString, itemClassType); | public T getResource(){
return Utils.parse(responseBodyString, itemClassType);
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
return super.getProperties();
}
} | class CosmosAsyncItemResponse<T> extends CosmosResponse<CosmosItemProperties> {
private final Class<T> itemClassType;
private final String responseBodyString;
CosmosAsyncItemResponse(ResourceResponse<Document> response, Class<T> klass) {
super(response);
this.itemClassType = klass;
responseBodyString = response.getBodyAsString();
if (StringUtils.isEmpty(responseBodyString)){
super.setProperties(null);
} else {
CosmosItemProperties props = new CosmosItemProperties(responseBodyString);
super.setProperties(props);
}
}
/**
* Gets the resource .
*
* @return the resource
*/
/**
* Gets the itemProperties
*
* @return the itemProperties
*/
public CosmosItemProperties getProperties() {
// Null when the service returned an empty body (set in the constructor).
return super.getProperties();
}
} |
We don't have an overload matching that. Will add if required in next PR. | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | new CosmosItemRequestOptions()); | protected void performWorkload(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber, long i) throws InterruptedException {
String partitionKey = uuid + i;
Mono<CosmosAsyncItemResponse<PojoizedJson>> obs;
if (configuration.isDisablePassingPartitionKeyAsOptionOnWrite()) {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue));
} else {
obs = cosmosAsyncContainer.createItem(generateDocument(partitionKey, dataFieldValue),
new PartitionKey(partitionKey),
new CosmosItemRequestOptions());
}
concurrencyControlSemaphore.acquire();
if (configuration.getOperationType() == Configuration.Operation.WriteThroughput) {
obs.subscribeOn(Schedulers.parallel()).subscribe(baseSubscriber);
} else {
LatencySubscriber<CosmosAsyncItemResponse> latencySubscriber = new LatencySubscriber<>(baseSubscriber);
latencySubscriber.context = latency.time();
obs.subscribeOn(Schedulers.parallel()).subscribe(latencySubscriber);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} | class LatencySubscriber<T> extends BaseSubscriber<T> {
Timer.Context context;
BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber;
LatencySubscriber(BaseSubscriber<CosmosAsyncItemResponse> baseSubscriber) {
this.baseSubscriber = baseSubscriber;
}
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
}
@Override
protected void hookOnComplete() {
context.stop();
baseSubscriber.onComplete();
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
baseSubscriber.onError(throwable);
}
} |
What is a "type" supposed to be? | public String type() {
return null;
} | return null; | public String type() {
return null;
} | class AzureFileStore extends FileStore {
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
// Binds this file store to the blob container named containerName within the
// parent file system's storage account, creating the container on the service
// if it does not already exist.
//
// Throws IllegalStateException if parentFileSystem is null, and IOException
// wrapping any client failure from the existence check or creation call.
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw new IllegalStateException("AzureFileStore cannot be instantiated without a parent FileSystem");
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
// exists() and create() both perform service calls; any failure is surfaced
// as an IOException carrying the container name for context.
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw new IOException("There was an error in establishing the existence of container: " + containerName, e);
}
}
/**
* {@inheritDoc}
*/
@Override
public String name() {
// A file store maps one-to-one to a blob container, so the store name is the
// container name.
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
// Always reports writable; no read-only mode is implemented in this store.
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUnallocatedSpace() throws IOException {
// Stub: space accounting is not meaningful for a blob container; returns 0.
// NOTE(review): 0 can read as "no space available" — confirm whether a
// sentinel such as -1 or Long.MAX_VALUE better conveys "not applicable".
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} | class AzureFileStore extends FileStore {
private final ClientLogger logger = new ClientLogger(AzureFileStore.class);
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
// Binds this file store to the blob container named containerName, creating the
// container on the service if absent. Failures are routed through
// Utility.logError so they are logged before being thrown.
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw Utility.logError(logger, new IllegalStateException("AzureFileStore cannot be instantiated without " +
"a parent FileSystem"));
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
// exists()/create() both call the service; wrap any failure as an IOException
// carrying the container name.
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw Utility.logError(logger, new IOException("There was an error in establishing the existence of " +
"container: " + containerName, e));
}
}
/**
* Returns the name of the container that underlies this file store.
*
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUnallocatedSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} |
Is it better to return -1 to be extremely obvious that we don't care about these numbers instead of accidentally implying there's no space or something like that? | public long getUnallocatedSpace() throws IOException {
return 0;
} | } | public long getUnallocatedSpace() throws IOException {
return 0;
} | class AzureFileStore extends FileStore {
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw new IllegalStateException("AzureFileStore cannot be instantiated without a parent FileSystem");
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw new IOException("There was an error in establishing the existence of container: " + containerName, e);
}
}
/**
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
public String type() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} | class AzureFileStore extends FileStore {
private final ClientLogger logger = new ClientLogger(AzureFileStore.class);
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw Utility.logError(logger, new IllegalStateException("AzureFileStore cannot be instantiated without " +
"a parent FileSystem"));
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw Utility.logError(logger, new IOException("There was an error in establishing the existence of " +
"container: " + containerName, e));
}
}
/**
* Returns the name of the container that underlies this file store.
*
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
public String type() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} |
Updated PR description to note this API is not implemented yet and does not need to be reviewed. | public String type() {
return null;
} | return null; | public String type() {
return null;
} | class AzureFileStore extends FileStore {
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw new IllegalStateException("AzureFileStore cannot be instantiated without a parent FileSystem");
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw new IOException("There was an error in establishing the existence of container: " + containerName, e);
}
}
/**
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUnallocatedSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} | class AzureFileStore extends FileStore {
private final ClientLogger logger = new ClientLogger(AzureFileStore.class);
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw Utility.logError(logger, new IllegalStateException("AzureFileStore cannot be instantiated without " +
"a parent FileSystem"));
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw Utility.logError(logger, new IOException("There was an error in establishing the existence of " +
"container: " + containerName, e));
}
}
/**
* Returns the name of the container that underlies this file store.
*
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUnallocatedSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} |
Updated PR description to note this API is not implemented yet and does not need to be reviewed. | public long getUnallocatedSpace() throws IOException {
return 0;
} | } | public long getUnallocatedSpace() throws IOException {
return 0;
} | class AzureFileStore extends FileStore {
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw new IllegalStateException("AzureFileStore cannot be instantiated without a parent FileSystem");
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw new IOException("There was an error in establishing the existence of container: " + containerName, e);
}
}
/**
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
public String type() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} | class AzureFileStore extends FileStore {
private final ClientLogger logger = new ClientLogger(AzureFileStore.class);
private final AzureFileSystem parentFileSystem;
private final BlobContainerClient containerClient;
AzureFileStore(AzureFileSystem parentFileSystem, String containerName) throws IOException {
if (Objects.isNull(parentFileSystem)) {
throw Utility.logError(logger, new IllegalStateException("AzureFileStore cannot be instantiated without " +
"a parent FileSystem"));
}
this.parentFileSystem = parentFileSystem;
this.containerClient = this.parentFileSystem.getBlobServiceClient().getBlobContainerClient(containerName);
try {
if (!this.containerClient.exists()) {
this.containerClient.create();
}
} catch (Exception e) {
throw Utility.logError(logger, new IOException("There was an error in establishing the existence of " +
"container: " + containerName, e));
}
}
/**
* Returns the name of the container that underlies this file store.
*
* {@inheritDoc}
*/
@Override
public String name() {
return this.containerClient.getBlobContainerName();
}
/**
* {@inheritDoc}
*/
@Override
public String type() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public long getTotalSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
public long getUsableSpace() throws IOException {
return 0;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(Class<? extends FileAttributeView> aClass) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean supportsFileAttributeView(String s) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileStoreAttributeView> V getFileStoreAttributeView(Class<V> aClass) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Object getAttribute(String s) throws IOException {
return null;
}
} |
Do we want to make this a constant somewhere? | public String getScheme() {
return "azb";
} | return "azb"; | public String getScheme() {
return "azb";
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
    String accountName = extractAccountName(uri);
    // Cheap fast-path rejection before paying for AzureFileSystem construction.
    if (this.openFileSystems.containsKey(accountName)) {
        throw new FileSystemAlreadyExistsException();
    }
    AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
    // putIfAbsent closes the containsKey/put check-then-act race on the concurrent
    // map: if another thread registered this account between the check above and
    // here, fail the same way instead of silently replacing the open file system.
    if (this.openFileSystems.putIfAbsent(accountName, afs) != null) {
        throw new FileSystemAlreadyExistsException();
    }
    return afs;
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
    String accountName = extractAccountName(uri);
    // Single map lookup instead of containsKey()+get(): atomic with respect to a
    // concurrent close()/remove, and avoids probing the map twice.
    FileSystem fileSystem = this.openFileSystems.get(accountName);
    if (fileSystem == null) {
        throw new FileSystemNotFoundException();
    }
    return fileSystem;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
extra new line | public Iterable<FileStore> getFileStores() {
return
this.fileStores.values();
} | this.fileStores.values(); | public Iterable<FileStore> getFileStores() {
return this.fileStores.values();
} | class AzureFileSystem extends FileSystem {
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
public static final String AZURE_STORAGE_BLOCK_SIZE = "AzureStorageBlockSize";
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw new IllegalArgumentException("AzureFileSystem cannot be instantiated without a parent " +
"FileSystemProvider");
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw new IllegalArgumentException("There was an error parsing the configurations map. Please ensure all" +
"fields are set to a legal value of the correct type.");
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw new IOException("Initializing FileStores failed. FileSystem could not be opened.", e);
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
// Flip the flag first so isOpen() reports closed immediately; in-flight
// operations are not interrupted, per the contract documented above.
this.closed = true;
// Deregister from the provider so a file system for the same account may be
// reopened later.
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
// Open until close() sets the closed flag.
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
// Blob-style virtual paths use the forward slash as the name separator.
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw new IllegalArgumentException(String.format("No credentials were provided. Please specify one of the" +
" following when constructing an AzureFileSystem: %s, %s.", AZURE_STORAGE_ACCOUNT_KEY,
AZURE_STORAGE_SAS_TOKEN));
}
builder.httpLogOptions(new HttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw new IllegalArgumentException("The list of FileStores cannot be null.");
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} | class AzureFileSystem extends FileSystem {
private final ClientLogger logger = new ClientLogger(AzureFileSystem.class);
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
/**
* Expected type: com.azure.core.http.policy.HttpLogLevelDetail
*/
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
/**
* Expected type: com.azure.storage.common.policy.RetryPolicyType
*/
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_UPLOAD_BLOCK_SIZE = "AzureStorageUploadBlockSize";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
/**
* Expected type: Boolean
*/
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw Utility.logError(logger, new IllegalArgumentException("AzureFileSystem cannot be instantiated" +
" without a parent FileSystemProvider"));
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_UPLOAD_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw Utility.logError(logger, new IllegalArgumentException("There was an error parsing the configurations " +
"map. Please ensure all fields are set to a legal value of the correct type."));
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw Utility.logError(logger,
new IOException("Initializing FileStores failed. FileSystem could not be opened.", e));
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw Utility.logError(logger, new IllegalArgumentException(String.format("No credentials were provided. " +
"Please specify one of the following when constructing an AzureFileSystem: %s, %s.",
AZURE_STORAGE_ACCOUNT_KEY, AZURE_STORAGE_SAS_TOKEN)));
}
builder.httpLogOptions(BlobServiceClientBuilder.getDefaultHttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw Utility.logError(logger, new IllegalArgumentException("The list of FileStores cannot be null."));
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} |
This should use `BlobServiceClientBuilder.getDefaultHttpLopOptions()` as the base before setting the log level, this will continue to maintain proper query string and header logging. Right now this would wipe out all Storage specific headers and query string parameters from being logged. Future enhancement would be adding two additional configurations that would set the loggable query string parameters and header names. | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw new IllegalArgumentException(String.format("No credentials were provided. Please specify one of the" +
" following when constructing an AzureFileSystem: %s, %s.", AZURE_STORAGE_ACCOUNT_KEY,
AZURE_STORAGE_SAS_TOKEN));
}
builder.httpLogOptions(new HttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | .setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL))); | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw Utility.logError(logger, new IllegalArgumentException(String.format("No credentials were provided. " +
"Please specify one of the following when constructing an AzureFileSystem: %s, %s.",
AZURE_STORAGE_ACCOUNT_KEY, AZURE_STORAGE_SAS_TOKEN)));
}
builder.httpLogOptions(BlobServiceClientBuilder.getDefaultHttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | class AzureFileSystem extends FileSystem {
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
public static final String AZURE_STORAGE_BLOCK_SIZE = "AzureStorageBlockSize";
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw new IllegalArgumentException("AzureFileSystem cannot be instantiated without a parent " +
"FileSystemProvider");
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw new IllegalArgumentException("There was an error parsing the configurations map. Please ensure all" +
"fields are set to a legal value of the correct type.");
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw new IOException("Initializing FileStores failed. FileSystem could not be opened.", e);
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
return
this.fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw new IllegalArgumentException("The list of FileStores cannot be null.");
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} | class AzureFileSystem extends FileSystem {
private final ClientLogger logger = new ClientLogger(AzureFileSystem.class);
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
/**
* Expected type: com.azure.core.http.policy.HttpLogLevelDetail
*/
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
/**
* Expected type: com.azure.storage.common.policy.RetryPolicyType
*/
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_UPLOAD_BLOCK_SIZE = "AzureStorageUploadBlockSize";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
/**
* Expected type: Boolean
*/
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw Utility.logError(logger, new IllegalArgumentException("AzureFileSystem cannot be instantiated" +
" without a parent FileSystemProvider"));
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_UPLOAD_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw Utility.logError(logger, new IllegalArgumentException("There was an error parsing the configurations " +
"map. Please ensure all fields are set to a legal value of the correct type."));
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw Utility.logError(logger,
new IOException("Initializing FileStores failed. FileSystem could not be opened.", e));
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
return this.fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw Utility.logError(logger, new IllegalArgumentException("The list of FileStores cannot be null."));
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} |
Should we add the already existing FileSystem name into the exception? | public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
} | throw new FileSystemAlreadyExistsException(); | public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
Should the name of the FileSystem be included in this message? Would help troubleshooting issues. | public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
} | throw new FileSystemNotFoundException(); | public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
I can make it a constant if you would prefer, but it feels a little redundant. I only anticipate other code calling into the provider to get the scheme when necessary, so all code would go through this method and the constant would only be returned from this method. | public String getScheme() {
return "azb";
} | return "azb"; | public String getScheme() {
return "azb";
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
Thanks for catching this. Yea I felt like those parameters were a bit overkill for an initial release and that it would be easy to add them later. | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw new IllegalArgumentException(String.format("No credentials were provided. Please specify one of the" +
" following when constructing an AzureFileSystem: %s, %s.", AZURE_STORAGE_ACCOUNT_KEY,
AZURE_STORAGE_SAS_TOKEN));
}
builder.httpLogOptions(new HttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | .setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL))); | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw Utility.logError(logger, new IllegalArgumentException(String.format("No credentials were provided. " +
"Please specify one of the following when constructing an AzureFileSystem: %s, %s.",
AZURE_STORAGE_ACCOUNT_KEY, AZURE_STORAGE_SAS_TOKEN)));
}
builder.httpLogOptions(BlobServiceClientBuilder.getDefaultHttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | class AzureFileSystem extends FileSystem {
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
public static final String AZURE_STORAGE_BLOCK_SIZE = "AzureStorageBlockSize";
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw new IllegalArgumentException("AzureFileSystem cannot be instantiated without a parent " +
"FileSystemProvider");
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw new IllegalArgumentException("There was an error parsing the configurations map. Please ensure all" +
"fields are set to a legal value of the correct type.");
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw new IOException("Initializing FileStores failed. FileSystem could not be opened.", e);
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
return
this.fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw new IllegalArgumentException("The list of FileStores cannot be null.");
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} | class AzureFileSystem extends FileSystem {
private final ClientLogger logger = new ClientLogger(AzureFileSystem.class);
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
/**
* Expected type: com.azure.core.http.policy.HttpLogLevelDetail
*/
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
/**
* Expected type: com.azure.storage.common.policy.RetryPolicyType
*/
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_UPLOAD_BLOCK_SIZE = "AzureStorageUploadBlockSize";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
/**
* Expected type: Boolean
*/
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw Utility.logError(logger, new IllegalArgumentException("AzureFileSystem cannot be instantiated" +
" without a parent FileSystemProvider"));
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_UPLOAD_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw Utility.logError(logger, new IllegalArgumentException("There was an error parsing the configurations " +
"map. Please ensure all fields are set to a legal value of the correct type."));
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw Utility.logError(logger,
new IOException("Initializing FileStores failed. FileSystem could not be opened.", e));
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
return this.fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw Utility.logError(logger, new IllegalArgumentException("The list of FileStores cannot be null."));
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} |
Probably a good idea. I'll double check the docs, too. | public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
} | throw new FileSystemAlreadyExistsException(); | public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
Same as above. | public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
} | throw new FileSystemNotFoundException(); | public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
public String getScheme() {
return "azb";
}
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
It's possible a user may want the constant when building their URI right? | public String getScheme() {
return "azb";
} | return "azb"; | public String getScheme() {
return "azb";
} | class AzureFileSystemProvider extends FileSystemProvider {
private static final String ACCOUNT_QUERY_KEY = "account";
private ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw new FileSystemAlreadyExistsException();
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw new FileSystemNotFoundException();
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw new IllegalArgumentException("URI scheme does not match this provider");
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw new IllegalArgumentException("URI does not contain a query component. FileSystems require a URI of " +
"the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw new IllegalArgumentException("No account name provided in URI query.");
}
return accountName;
}
} | class AzureFileSystemProvider extends FileSystemProvider {
private final ClientLogger logger = new ClientLogger(AzureFileSystemProvider.class);
private static final String ACCOUNT_QUERY_KEY = "account";
private final ConcurrentMap<String, FileSystem> openFileSystems;
/**
* Creates an AzureFileSystemProvider.
*/
public AzureFileSystemProvider() {
this.openFileSystems = new ConcurrentHashMap<>();
}
/**
* Returns {@code "azb".}
*/
@Override
/**
* The format of a {@code URI} identifying a file system is {@code "azb:
* <p>
* Once closed, a file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem newFileSystem(URI uri, Map<String, ?> config) throws IOException {
String accountName = extractAccountName(uri);
if (this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemAlreadyExistsException("Name: " + accountName));
}
AzureFileSystem afs = new AzureFileSystem(this, accountName, config);
this.openFileSystems.put(accountName, afs);
return afs;
}
/**
* The format of a {@code URI} identifying an file system is {@code "azb:
* <p>
* Trying to retrieve a closed file system will throw a {@link FileSystemNotFoundException}. Once closed, a
* file system with the same identifier may be reopened.
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem(URI uri) {
String accountName = extractAccountName(uri);
if (!this.openFileSystems.containsKey(accountName)) {
throw Utility.logError(this.logger, new FileSystemNotFoundException("Name: " + accountName));
}
return this.openFileSystems.get(accountName);
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(URI uri) {
return getFileSystem(uri).getPath(uri.getPath());
}
/**
* {@inheritDoc}
*/
@Override
public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> set,
FileAttribute<?>... fileAttributes) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public DirectoryStream<Path> newDirectoryStream(Path path, DirectoryStream.Filter<? super Path> filter) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void createDirectory(Path path, FileAttribute<?>... fileAttributes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void delete(Path path) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void copy(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public void move(Path path, Path path1, CopyOption... copyOptions) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public boolean isSameFile(Path path, Path path1) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isHidden(Path path) throws IOException {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public FileStore getFileStore(Path path) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void checkAccess(Path path, AccessMode... accessModes) throws IOException {
}
/**
* {@inheritDoc}
*/
@Override
public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> aClass, LinkOption... linkOptions) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> aClass, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, Object> readAttributes(Path path, String s, LinkOption... linkOptions) throws IOException {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public void setAttribute(Path path, String s, Object o, LinkOption... linkOptions) throws IOException {
}
void closeFileSystem(String fileSystemName) {
this.openFileSystems.remove(fileSystemName);
}
private String extractAccountName(URI uri) {
if (!uri.getScheme().equals(this.getScheme())) {
throw Utility.logError(this.logger, new IllegalArgumentException(
"URI scheme does not match this provider"));
}
if (CoreUtils.isNullOrEmpty(uri.getQuery())) {
throw Utility.logError(this.logger, new IllegalArgumentException("URI does not contain a query " +
"component. FileSystems require a URI of the format \"azb:
}
String accountName = Flux.fromArray(uri.getQuery().split("&"))
.filter(s -> s.startsWith(ACCOUNT_QUERY_KEY + "="))
.switchIfEmpty(Mono.error(Utility.logError(this.logger, new IllegalArgumentException(
"URI does not contain an \"" + ACCOUNT_QUERY_KEY + "=\" parameter. FileSystems require a URI " +
"of the format \"azb:
.map(s -> s.substring(ACCOUNT_QUERY_KEY.length() + 1))
.blockLast();
if (CoreUtils.isNullOrEmpty(accountName)) {
throw Utility.logError(logger, new IllegalArgumentException("No account name provided in URI query."));
}
return accountName;
}
} |
Agreed on the parameters not being needed during the initial preview. | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw new IllegalArgumentException(String.format("No credentials were provided. Please specify one of the" +
" following when constructing an AzureFileSystem: %s, %s.", AZURE_STORAGE_ACCOUNT_KEY,
AZURE_STORAGE_SAS_TOKEN));
}
builder.httpLogOptions(new HttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | .setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL))); | private BlobServiceClient buildBlobServiceClient(String accountName, Map<String,?> config) {
String scheme = !config.containsKey(AZURE_STORAGE_USE_HTTPS)
|| (Boolean) config.get(AZURE_STORAGE_USE_HTTPS)
? "https" : "http";
BlobServiceClientBuilder builder = new BlobServiceClientBuilder()
.endpoint(String.format(AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE, scheme, accountName));
if (config.containsKey(AZURE_STORAGE_ACCOUNT_KEY)) {
builder.credential(new StorageSharedKeyCredential(accountName,
(String)config.get(AZURE_STORAGE_ACCOUNT_KEY)));
}
else if (config.containsKey(AZURE_STORAGE_SAS_TOKEN)) {
builder.sasToken((String) config.get(AZURE_STORAGE_SAS_TOKEN));
}
else {
throw Utility.logError(logger, new IllegalArgumentException(String.format("No credentials were provided. " +
"Please specify one of the following when constructing an AzureFileSystem: %s, %s.",
AZURE_STORAGE_ACCOUNT_KEY, AZURE_STORAGE_SAS_TOKEN)));
}
builder.httpLogOptions(BlobServiceClientBuilder.getDefaultHttpLogOptions()
.setLogLevel((HttpLogDetailLevel)config.get(AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL)));
RequestRetryOptions retryOptions = new RequestRetryOptions(
(RetryPolicyType)config.get(AZURE_STORAGE_RETRY_POLICY_TYPE),
(Integer)config.get(AZURE_STORAGE_MAX_TRIES),
(Integer)config.get(AZURE_STORAGE_TRY_TIMEOUT),
(Long)config.get(AZURE_STORAGE_RETRY_DELAY_IN_MS),
(Long)config.get(AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS),
(String)config.get(AZURE_STORAGE_SECONDARY_HOST));
builder.retryOptions(retryOptions);
builder.httpClient((HttpClient)config.get(AZURE_STORAGE_HTTP_CLIENT));
return builder.buildClient();
} | class AzureFileSystem extends FileSystem {
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
public static final String AZURE_STORAGE_BLOCK_SIZE = "AzureStorageBlockSize";
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
/**
 * Builds the file system from the supplied configuration map.
 *
 * @param parentFileSystemProvider the provider that created (and later closes) this file system; must not be null.
 * @param accountName the storage account backing this file system.
 * @param config configuration entries; see the AZURE_STORAGE_* keys declared on this class.
 * @throws IllegalArgumentException if the provider is null or the configuration map cannot be parsed.
 * @throws IOException if the FileStores cannot be initialized.
 */
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw new IllegalArgumentException("AzureFileSystem cannot be instantiated without a parent " +
"FileSystemProvider");
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
// Fix: the original message concatenated "all" + "fields" into "allfields", and the
// underlying exception was discarded; keep it as the cause for diagnosability.
throw new IllegalArgumentException("There was an error parsing the configurations map. Please ensure all " +
"fields are set to a legal value of the correct type.", e);
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw new IOException("Initializing FileStores failed. FileSystem could not be opened.", e);
}
// Newly constructed file systems start open.
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
    // View over the stores created from the names supplied via AZURE_STORAGE_FILE_STORES.
    return fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
    // The store names arrive as a single comma-separated string.
    String names = (String) config.get(AZURE_STORAGE_FILE_STORES);
    if (CoreUtils.isNullOrEmpty(names)) {
        throw new IllegalArgumentException("The list of FileStores cannot be null.");
    }
    // One AzureFileStore per configured name, keyed by that name.
    Map<String, FileStore> stores = new HashMap<>();
    for (String name : names.split(",")) {
        stores.put(name, new AzureFileStore(this, name));
    }
    return stores;
}
} | class AzureFileSystem extends FileSystem {
private final ClientLogger logger = new ClientLogger(AzureFileSystem.class);
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_ACCOUNT_KEY = "AzureStorageAccountKey";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SAS_TOKEN = "AzureStorageSasToken";
/**
* Expected type: com.azure.core.http.policy.HttpLogLevelDetail
*/
public static final String AZURE_STORAGE_HTTP_LOG_DETAIL_LEVEL = "AzureStorageHttpLogDetailLevel";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_MAX_TRIES = "AzureStorageMaxTries";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_TRY_TIMEOUT = "AzureStorageTryTimeout";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_RETRY_DELAY_IN_MS = "AzureStorageRetryDelayInMs";
/**
* Expected type: Long
*/
public static final String AZURE_STORAGE_MAX_RETRY_DELAY_IN_MS = "AzureStorageMaxRetryDelayInMs";
/**
* Expected type: com.azure.storage.common.policy.RetryPolicyType
*/
public static final String AZURE_STORAGE_RETRY_POLICY_TYPE = "AzureStorageRetryPolicyType";
/**
* Expected type: String
*/
public static final String AZURE_STORAGE_SECONDARY_HOST = "AzureStorageSecondaryHost";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_UPLOAD_BLOCK_SIZE = "AzureStorageUploadBlockSize";
/**
* Expected type: Integer
*/
public static final String AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES = "AzureStorageDownloadResumeRetries";
/**
* Expected type: Boolean
*/
public static final String AZURE_STORAGE_USE_HTTPS = "AzureStorageUseHttps";
public static final String AZURE_STORAGE_HTTP_CLIENT = "AzureStorageHttpClient";
public static final String AZURE_STORAGE_FILE_STORES = "AzureStorageFileStores";
private static final String AZURE_STORAGE_BLOB_ENDPOINT_TEMPLATE = "%s:
private final AzureFileSystemProvider parentFileSystemProvider;
private final BlobServiceClient blobServiceClient;
private final Integer blockSize;
private final Integer downloadResumeRetries;
private final Map<String, FileStore> fileStores;
private boolean closed;
AzureFileSystem(AzureFileSystemProvider parentFileSystemProvider, String accountName, Map<String, ?> config)
throws IOException {
if (Objects.isNull(parentFileSystemProvider)) {
throw Utility.logError(logger, new IllegalArgumentException("AzureFileSystem cannot be instantiated" +
" without a parent FileSystemProvider"));
}
this.parentFileSystemProvider = parentFileSystemProvider;
try {
this.blobServiceClient = this.buildBlobServiceClient(accountName, config);
this.blockSize = (Integer) config.get(AZURE_STORAGE_UPLOAD_BLOCK_SIZE);
this.downloadResumeRetries = (Integer) config.get(AZURE_STORAGE_DOWNLOAD_RESUME_RETRIES);
} catch (Exception e) {
throw Utility.logError(logger, new IllegalArgumentException("There was an error parsing the configurations " +
"map. Please ensure all fields are set to a legal value of the correct type."));
}
try {
this.fileStores = this.initializeFileStores(config);
} catch (IOException e) {
throw Utility.logError(logger,
new IOException("Initializing FileStores failed. FileSystem could not be opened.", e));
}
this.closed = false;
}
/**
* {@inheritDoc}
*/
@Override
public FileSystemProvider provider() {
return this.parentFileSystemProvider;
}
/**
* Closing the file system will not block on outstanding operations. Any operations in progress will be allowed to
* terminate naturally after the file system is closed, though no further operations may be started after the
* parent file system is closed.
*
* Once closed, a file system with the same identifier as the one closed may be re-opened.
* {@inheritDoc}
*/
@Override
public void close() throws IOException {
this.closed = true;
this.parentFileSystemProvider.closeFileSystem(this.getFileSystemName());
}
/**
* {@inheritDoc}
*/
@Override
public boolean isOpen() {
return !this.closed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly() {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public String getSeparator() {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Path> getRootDirectories() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<FileStore> getFileStores() {
return this.fileStores.values();
}
/**
* {@inheritDoc}
*/
@Override
public Set<String> supportedFileAttributeViews() {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getPath(String s, String... strings) {
return new AzurePath(this, s, strings);
}
/**
* {@inheritDoc}
*/
@Override
public PathMatcher getPathMatcher(String s) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public WatchService newWatchService() throws IOException {
throw new UnsupportedOperationException();
}
String getFileSystemName() {
return this.blobServiceClient.getAccountName();
}
BlobServiceClient getBlobServiceClient() {
return this.blobServiceClient;
}
private Map<String, FileStore> initializeFileStores(Map<String, ?> config) throws IOException {
String fileStoreNames = (String)config.get(AZURE_STORAGE_FILE_STORES);
if (CoreUtils.isNullOrEmpty(fileStoreNames)) {
throw Utility.logError(logger, new IllegalArgumentException("The list of FileStores cannot be null."));
}
Map<String, FileStore> fileStores = new HashMap<>();
for (String fileStoreName : fileStoreNames.split(",")) {
fileStores.put(fileStoreName, new AzureFileStore(this, fileStoreName));
}
return fileStores;
}
} |
Why not use [Math.pow()](https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#pow-double-double-) method instead of implementing own power method? | private static int Pow(int value, int exponent) {
// Integer exponentiation by repeated multiplication.
// NOTE(review): assumes exponent >= 0 and that the result fits in an int; overflow
// wraps silently -- confirm callers only pass small exponents.
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | } | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
// Each successive SleepTest instance sleeps twice as long as the previous one:
// the N-th instance constructed waits 2^N seconds per operation.
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
    // Synchronous variant: block the calling thread for this instance's duration.
    try {
        // Multiply as long so large per-instance durations do not overflow int.
        Thread.sleep(_secondsPerOperation * 1000L);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before rethrowing so callers can still observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
// Asynchronous variant: completes after a non-blocking delay of _secondsPerOperation seconds.
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
    // Synchronous variant: block the calling thread for this instance's duration.
    try {
        // Multiply as long so large per-instance durations do not overflow int.
        Thread.sleep(_secondsPerOperation * 1000L);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before rethrowing so callers can still observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} |
This case is interesting given that writing to the blob may be delayed a non-determinant amount of time, this differs from the other cases of `overwrite` where their write operation should happen relatively soon compared to the validation check. | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
// No preconditions when overwriting; otherwise guard against clobbering existing data.
BlobRequestConditions requestConditions = null;
if (!overwrite) {
// Fail fast if the blob already exists at open time...
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
// ...and also attach an If-None-Match (ETag wildcard) condition so a blob created
// between this check and the stream's commit should still be rejected by the service.
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | } | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
BlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
// Keeps a typed reference to the async client (in addition to handing it to the
// BlobClient superclass) so the encryption-aware operations below can delegate to it.
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
    BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
    BlobRequestConditions requestConditions) {
    // Hand everything to the shared block-blob stream factory, backed by the
    // encrypted async client.
    return BlobOutputStream.blockBlobOutputStream(
        encryptedBlobAsyncClient, parallelTransferOptions, headers, metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
// Fail fast if the caller did not opt in to overwriting an existing blob.
// NOTE(review): unlike getBlobOutputStream(boolean), no ETag precondition is passed to
// the delegate (all nulls), so a blob created between this exists() check and the
// upload may still be overwritten -- confirm whether an If-None-Match condition
// should be supplied here as well.
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
// Build the reactive upload pipeline first; it runs when blocked on below.
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
// Block the calling thread, honoring the optional timeout (null timeout presumably
// means wait indefinitely -- TODO confirm against StorageImplUtils).
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
// Log-and-rethrow keeps this client's convention for surfacing I/O failures.
throw logger.logExceptionAsError(e);
}
}
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
BlobRequestConditions requestConditions) {
return BlobOutputStream.blockBlobOutputStream(encryptedBlobAsyncClient, parallelTransferOptions, headers,
metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw logger.logExceptionAsError(e);
}
}
} |
This makes sense to me; the only other way would be to pass it in to the output stream, and it would then fail upon closing — which would be potentially annoying for the customer. | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
BlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | } | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
BlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
BlobRequestConditions requestConditions) {
return BlobOutputStream.blockBlobOutputStream(encryptedBlobAsyncClient, parallelTransferOptions, headers,
metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw logger.logExceptionAsError(e);
}
}
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
BlobRequestConditions requestConditions) {
return BlobOutputStream.blockBlobOutputStream(encryptedBlobAsyncClient, parallelTransferOptions, headers,
metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw logger.logExceptionAsError(e);
}
}
} |
Agreed, failing fast is the better option here. | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
BlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | } | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
BlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return getBlobOutputStream(null, null, null, null, requestConditions);
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
BlobRequestConditions requestConditions) {
return BlobOutputStream.blockBlobOutputStream(encryptedBlobAsyncClient, parallelTransferOptions, headers,
metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw logger.logExceptionAsError(e);
}
}
} | class EncryptedBlobClient extends BlobClient {
private final ClientLogger logger = new ClientLogger(EncryptedBlobClient.class);
private final EncryptedBlobAsyncClient encryptedBlobAsyncClient;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*/
EncryptedBlobClient(EncryptedBlobAsyncClient encryptedBlobAsyncClient) {
super(encryptedBlobAsyncClient);
this.encryptedBlobAsyncClient = encryptedBlobAsyncClient;
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
return getBlobOutputStream(false);
}
/**
* Creates and opens an output stream to write data to the block blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether or not to overwrite, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the block blob. If the blob already exists on the service, it
* will be overwritten.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* @param parallelTransferOptions {@link ParallelTransferOptions} used to configure buffered uploading.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the destination blob.
* @param requestConditions {@link BlobRequestConditions}
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier,
BlobRequestConditions requestConditions) {
return BlobOutputStream.blockBlobOutputStream(encryptedBlobAsyncClient, parallelTransferOptions, headers,
metadata, tier, requestConditions);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
*/
public void uploadFromFile(String filePath) {
uploadFromFile(filePath, false);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param overwrite Whether or not to overwrite should data already exist on the blob
*/
public void uploadFromFile(String filePath, boolean overwrite) {
if (!overwrite && exists()) {
throw logger.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
uploadFromFile(filePath, null, null, null, null, null, null);
}
/**
* Creates a new block blob, or updates the content of an existing block blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.cryptography.EncryptedBlobClient.uploadFromFile
*
* @param filePath Path of the file to upload
* @param parallelTransferOptions {@link ParallelTransferOptions} to use to upload from file. Number of parallel
* transfers parameter is ignored.
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param tier {@link AccessTier} for the uploaded blob
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @throws UncheckedIOException If an I/O error occurs
*/
public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions,
BlobHttpHeaders headers, Map<String, String> metadata, AccessTier tier, BlobRequestConditions requestConditions,
Duration timeout) throws UncheckedIOException {
Mono<Void> upload = this.encryptedBlobAsyncClient.uploadFromFile(filePath, parallelTransferOptions,
headers, metadata, tier, requestConditions);
try {
StorageImplUtils.blockWithOptionalTimeout(upload, timeout);
} catch (UncheckedIOException e) {
throw logger.logExceptionAsError(e);
}
}
} |
Yeah, we can use Math.pow(), unless @mikeharder had any concerns that this might create any performance impact or inconsistencies across languages. | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | } | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} |
I was afraid of the conversion between `double` and `int`. But if you know this is safe you can replace with `Math.pow()`. | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | } | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} |
Math.pow() works for this usecase, we can replace it | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | } | private static int Pow(int value, int exponent) {
int power = 1;
for (int i=0; i < exponent; i++) {
power *= value;
}
return power;
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} | class SleepTest extends PerfStressTest<PerfStressOptions> {
private static final AtomicInteger _instanceCount = new AtomicInteger();
private final int _secondsPerOperation;
public SleepTest(PerfStressOptions options) {
super(options);
int instanceCount = _instanceCount.incrementAndGet();
_secondsPerOperation = Pow(2, instanceCount);
}
@Override
public void Run() {
try {
Thread.sleep(_secondsPerOperation * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public Mono<Void> RunAsync() {
return Mono.delay(Duration.ofSeconds(_secondsPerOperation)).then();
}
} |
We should remove all the score equal comparison. Currently. the nightly live test failed because of it. https://dev.azure.com/azure-sdk/internal/_build/results?buildId=242950&view=logs&j=4d5db6ce-0b7f-527e-b115-2367ee6e1fef&t=b7d16dfc-4abf-5ff2-eaa7-e82f3df2ef1b | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.99983596801757812);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859); | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.0);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.0);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentNamedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", NAMED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", NAMED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentPiiInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.PII_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.PII_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentLinkedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.LINKED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.LINKED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentKeyPhraseInputs() {
return Arrays.asList(
new TextDocumentInput("0", KEY_PHRASE_INPUTS.get(0)),
new TextDocumentInput("1", KEY_PHRASE_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentSentimentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.SENTIMENT_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.SENTIMENT_INPUTS.get(1))
);
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 1.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 1.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.75);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.18693659716732069, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.1, 0.5, 0.4, 66, 0);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32)
));
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 0.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 0.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.0);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.0, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
 * Builds the expected batch of key-phrase extraction results: document "0"
 * yields {"input text", "world"} and document "1" yields {"monde"}.
 */
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
    final TextDocumentStatistics stats0 = new TextDocumentStatistics(49, 1);
    final TextDocumentStatistics stats1 = new TextDocumentStatistics(21, 1);
    final ExtractKeyPhraseResult result0 =
        new ExtractKeyPhraseResult("0", stats0, null, Arrays.asList("input text", "world"));
    final ExtractKeyPhraseResult result1 =
        new ExtractKeyPhraseResult("1", stats1, null, Collections.singletonList("monde"));
    return new DocumentResultCollection<>(
        Arrays.asList(result0, result1),
        DEFAULT_MODEL_VERSION,
        new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
 * Builds the expected batch of sentiment analysis results. Both documents share
 * a MIXED document-level sentiment; all scores are 0.0 because score values are
 * not asserted by the tests.
 */
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
    final TextDocumentStatistics statsDoc0 = new TextDocumentStatistics(67, 1);
    final TextDocumentStatistics statsDoc1 = new TextDocumentStatistics(67, 1);
    final TextSentiment documentSentiment =
        new TextSentiment(TextSentimentClass.MIXED, 0.0, 0.0, 0.0, 66, 0);
    // Document "0": negative sentence first, positive sentence second.
    final AnalyzeSentimentResult result0 = new AnalyzeSentimentResult("0", statsDoc0, null,
        documentSentiment,
        Arrays.asList(
            new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 0),
            new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 32)));
    // Document "1": the same sentences in the opposite order.
    final AnalyzeSentimentResult result1 = new AnalyzeSentimentResult("1", statsDoc1, null,
        documentSentiment,
        Arrays.asList(
            new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 0),
            new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 36)));
    return new DocumentResultCollection<>(
        Arrays.asList(result0, result1),
        DEFAULT_MODEL_VERSION,
        new TextDocumentBatchStatistics(2, 2, 0, 2));
}
// Private constructor: this is a static test-utility holder and must not be instantiated.
private TestUtils() {
}
} |
Updated to not check for equality for offset/length/score properties on models. | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.99983596801757812);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859); | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.0);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.0);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
/**
 * Wraps {@code DETECT_LANGUAGE_INPUTS} into {@link DetectLanguageInput} documents.
 * Documents "0" and "2" carry an explicit "US" country hint; document "1" omits it.
 */
static List<DetectLanguageInput> getDetectLanguageInputs() {
    final DetectLanguageInput english = new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US");
    final DetectLanguageInput spanish = new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1));
    final DetectLanguageInput unknown = new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US");
    return Arrays.asList(english, spanish, unknown);
}
// Builds the two TextDocumentInput documents (ids "0" and "1") from NAMED_ENTITY_INPUTS
// for named-entity recognition tests.
static List<TextDocumentInput> getTextDocumentNamedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", NAMED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", NAMED_ENTITY_INPUTS.get(1))
);
}
// Builds the two TextDocumentInput documents (ids "0" and "1") from PII_ENTITY_INPUTS
// for PII-entity recognition tests.
static List<TextDocumentInput> getTextDocumentPiiInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.PII_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.PII_ENTITY_INPUTS.get(1))
);
}
// Builds the two TextDocumentInput documents (ids "0" and "1") from LINKED_ENTITY_INPUTS
// for linked-entity recognition tests.
static List<TextDocumentInput> getTextDocumentLinkedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.LINKED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.LINKED_ENTITY_INPUTS.get(1))
);
}
// Builds the two TextDocumentInput documents (ids "0" and "1") from KEY_PHRASE_INPUTS
// for key-phrase extraction tests.
static List<TextDocumentInput> getTextDocumentKeyPhraseInputs() {
return Arrays.asList(
new TextDocumentInput("0", KEY_PHRASE_INPUTS.get(0)),
new TextDocumentInput("1", KEY_PHRASE_INPUTS.get(1))
);
}
// Builds the two TextDocumentInput documents (ids "0" and "1") from SENTIMENT_INPUTS
// for sentiment-analysis tests.
static List<TextDocumentInput> getTextDocumentSentimentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.SENTIMENT_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.SENTIMENT_INPUTS.get(1))
);
}
/**
 * Builds the expected batch of language detection results: an English document,
 * a Spanish document, and an unrecognized ("(Unknown)") document.
 */
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
    final DetectedLanguage english = new DetectedLanguage("English", "en", 1.0);
    final DetectedLanguage spanish = new DetectedLanguage("Spanish", "es", 1.0);
    final DetectedLanguage unknown = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
    final DetectLanguageResult result0 = new DetectLanguageResult("0",
        new TextDocumentStatistics(26, 1), null, english, Collections.singletonList(english));
    final DetectLanguageResult result1 = new DetectLanguageResult("1",
        new TextDocumentStatistics(39, 1), null, spanish, Collections.singletonList(spanish));
    final DetectLanguageResult result2 = new DetectLanguageResult("2",
        new TextDocumentStatistics(6, 1), null, unknown, Collections.singletonList(unknown));
    return new DocumentResultCollection<>(
        Arrays.asList(result0, result1, result2),
        DEFAULT_MODEL_VERSION,
        new TextDocumentBatchStatistics(3, 3, 0, 3));
}
/**
 * Helper method to get the expected batch of PII entities: the SSN found in
 * document "0" and the ABA routing number found in document "1".
 *
 * @return the expected {@link RecognizePiiEntitiesResult} collection for the
 *     two PII input documents.
 */
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
    NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
    NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.75);
    List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
    List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
    TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
    TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
    RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
    RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
    TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
    List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
    return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.18693659716732069, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
 * Helper method to get the expected batch of key phrases: document "0" yields
 * {"input text", "world"} and document "1" yields {"monde"}.
 */
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
 * Helper method to get the expected batch of text sentiments. Both documents
 * share a MIXED document-level sentiment; each contains one negative and one
 * positive sentence (in opposite order between the two documents).
 */
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
// Document-level sentiment shared by both expected results.
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.1, 0.5, 0.4, 66, 0);
// Document "0": negative sentence first, positive sentence second.
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32)
));
// Document "1": the same sentences in the opposite order.
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
// Private constructor: this is a static test-utility holder and must not be instantiated.
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
/**
 * Wraps each string in {@code inputs} into a {@link TextDocumentInput} whose id
 * is the element's index rendered as a string ("0", "1", ...).
 *
 * @param inputs the raw document texts.
 * @return the wrapped documents, in input order.
 */
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
    return IntStream.range(0, inputs.size())
        .mapToObj(i -> new TextDocumentInput(Integer.toString(i), inputs.get(i)))
        .collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 0.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 0.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.0);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.0, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.0, 0.0, 0.0, 66, 0);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 32)
));
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
private TestUtils() {
}
} |
Should the score value be cleaned up as well? | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.99983596801757812);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859); | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.0);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.0);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentNamedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", NAMED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", NAMED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentPiiInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.PII_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.PII_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentLinkedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.LINKED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.LINKED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentKeyPhraseInputs() {
return Arrays.asList(
new TextDocumentInput("0", KEY_PHRASE_INPUTS.get(0)),
new TextDocumentInput("1", KEY_PHRASE_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentSentimentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.SENTIMENT_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.SENTIMENT_INPUTS.get(1))
);
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 1.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 1.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.75);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.18693659716732069, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.1, 0.5, 0.4, 66, 0);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32)
));
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 0.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 0.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.0);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.0, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.0, 0.0, 0.0, 66, 0);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 32)
));
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
private TestUtils() {
}
} |
same question apply to other hard-coded numerical values | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.99983596801757812);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859); | static DocumentResultCollection<RecognizeEntitiesResult> getExpectedBatchNamedEntities() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.0);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0);
NamedEntity namedEntity3 = new NamedEntity("Microsoft", "Organization", null, 10, 9, 0.0);
List<NamedEntity> namedEntityList1 = Arrays.asList(namedEntity1, namedEntity2);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentNamedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", NAMED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", NAMED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentPiiInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.PII_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.PII_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentLinkedEntityInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.LINKED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.LINKED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentKeyPhraseInputs() {
return Arrays.asList(
new TextDocumentInput("0", KEY_PHRASE_INPUTS.get(0)),
new TextDocumentInput("1", KEY_PHRASE_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentSentimentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TestUtils.SENTIMENT_INPUTS.get(0)),
new TextDocumentInput("1", TestUtils.SENTIMENT_INPUTS.get(1))
);
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 1.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 1.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.75);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.18693659716732069, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
final TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(67, 1);
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED,
0.1, 0.5, 0.4, 66, 0);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0", textDocumentStatistics1,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32)
));
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1", textDocumentStatistics2,
null,
expectedDocumentSentiment,
Arrays.asList(
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 0),
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 36)
));
return new DocumentResultCollection<>(Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> NAMED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final List<String> PII_ENTITY_INPUTS = Arrays.asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un document escrito en Español.", "~@!~:)");
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1)),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*/
static DocumentResultCollection<DetectLanguageResult> getExpectedBatchDetectedLanguages() {
DetectedLanguage detectedLanguage1 = new DetectedLanguage("English", "en", 0.0);
DetectedLanguage detectedLanguage2 = new DetectedLanguage("Spanish", "es", 0.0);
DetectedLanguage detectedLanguage3 = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
List<DetectedLanguage> detectedLanguageList1 = Collections.singletonList(detectedLanguage1);
List<DetectedLanguage> detectedLanguageList2 = Collections.singletonList(detectedLanguage2);
List<DetectedLanguage> detectedLanguageList3 = Collections.singletonList(detectedLanguage3);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(26, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(39, 1);
TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics(6, 1);
DetectLanguageResult detectLanguageResult1 = new DetectLanguageResult("0", textDocumentStatistics1, null, detectedLanguage1, detectedLanguageList1);
DetectLanguageResult detectLanguageResult2 = new DetectLanguageResult("1", textDocumentStatistics2, null, detectedLanguage2, detectedLanguageList2);
DetectLanguageResult detectLanguageResult3 = new DetectLanguageResult("2", textDocumentStatistics3, null, detectedLanguage3, detectedLanguageList3);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(detectLanguageResult1, detectLanguageResult2, detectLanguageResult3);
return new DocumentResultCollection<>(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Named Entities
*/
/**
* Helper method to get the expected Batch PII Entities
*/
static DocumentResultCollection<RecognizePiiEntitiesResult> getExpectedBatchPiiEntities() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0);
NamedEntity namedEntity2 = new NamedEntity("111000025", "ABA Routing Number", "", 18, 9, 0.0);
List<NamedEntity> namedEntityList1 = Collections.singletonList(namedEntity1);
List<NamedEntity> namedEntityList2 = Collections.singletonList(namedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, namedEntityList1);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, namedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizePiiEntitiesResult> recognizeEntitiesResultList = Arrays.asList(recognizeEntitiesResult1, recognizeEntitiesResult2);
return new DocumentResultCollection<>(recognizeEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Linked Entities
*/
static DocumentResultCollection<RecognizeLinkedEntitiesResult> getExpectedBatchLinkedEntities() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26);
LinkedEntityMatch linkedEntityMatch2 = new LinkedEntityMatch("Microsoft", 0.0, 9, 10);
LinkedEntity linkedEntity1 = new LinkedEntity(
"Seattle", Collections.singletonList(linkedEntityMatch1),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntity linkedEntity2 = new LinkedEntity(
"Microsoft", Collections.singletonList(linkedEntityMatch2),
"en", "Microsoft", "https:
"Wikipedia");
List<LinkedEntity> linkedEntityList1 = Collections.singletonList(linkedEntity1);
List<LinkedEntity> linkedEntityList2 = Collections.singletonList(linkedEntity2);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult1 = new RecognizeLinkedEntitiesResult("0", textDocumentStatistics1, null, linkedEntityList1);
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult2 = new RecognizeLinkedEntitiesResult("1", textDocumentStatistics2, null, linkedEntityList2);
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList = Arrays.asList(recognizeLinkedEntitiesResult1, recognizeLinkedEntitiesResult2);
return new DocumentResultCollection<>(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Key Phrases
*/
static DocumentResultCollection<ExtractKeyPhraseResult> getExpectedBatchKeyPhrases() {
List<String> keyPhrasesList1 = Arrays.asList("input text", "world");
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, keyPhrasesList1);
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, Collections.singletonList("monde"));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new DocumentResultCollection<>(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
/**
 * Helper method to build the {@link DocumentResultCollection} expected from a batch sentiment call.
 */
static DocumentResultCollection<AnalyzeSentimentResult> getExpectedBatchTextSentiment() {
    final TextDocumentStatistics stats0 = new TextDocumentStatistics(67, 1);
    final TextDocumentStatistics stats1 = new TextDocumentStatistics(67, 1);
    // Both documents are expected to carry the same overall (mixed) document-level sentiment.
    final TextSentiment documentSentiment = new TextSentiment(TextSentimentClass.MIXED,
        0.0, 0.0, 0.0, 66, 0);
    // Document "0": negative sentence at offset 0, positive sentence at offset 32.
    final AnalyzeSentimentResult result0 = new AnalyzeSentimentResult("0", stats0, null, documentSentiment,
        Arrays.asList(
            new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 0),
            new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 32)));
    // Document "1": positive sentence at offset 0, negative sentence at offset 36.
    final AnalyzeSentimentResult result1 = new AnalyzeSentimentResult("1", stats1, null, documentSentiment,
        Arrays.asList(
            new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 0),
            new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 36)));
    return new DocumentResultCollection<>(Arrays.asList(result0, result1), DEFAULT_MODEL_VERSION,
        new TextDocumentBatchStatistics(2, 2, 0, 2));
}
// Private constructor: TestUtils is a static helper holder and must not be instantiated.
private TestUtils() {
}
} |
Any reason this doesn't default `destinationFileSystem` to the current file system? Looks like `renameWithResponse` will pass the current file system as the `destinationFileSystem` value. Seems odd to allow null in one case and not in another very similar case. | DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) {
if (CoreUtils.isNullOrEmpty(destinationFileSystem)) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'destinationFileSystem' can not be set to null"));
}
if (CoreUtils.isNullOrEmpty(destinationPath)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null"));
}
String newDfsEndpoint = BlobUrlParts.parse(getPathUrl())
.setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString();
return new DataLakePathAsyncClient(getHttpPipeline(), newDfsEndpoint, serviceVersion, accountName,
destinationFileSystem, destinationPath, pathResourceType,
prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient());
} | throw logger.logExceptionAsError(new IllegalArgumentException( | DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) {
if (destinationFileSystem == null) {
destinationFileSystem = getFileSystemName();
}
if (CoreUtils.isNullOrEmpty(destinationPath)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null"));
}
String newDfsEndpoint = BlobUrlParts.parse(getPathUrl())
.setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString();
return new DataLakePathAsyncClient(getHttpPipeline(), newDfsEndpoint, serviceVersion, accountName,
destinationFileSystem, destinationPath, pathResourceType,
prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient());
} | class DataLakePathAsyncClient {
private final ClientLogger logger = new ClientLogger(DataLakePathAsyncClient.class);
protected final DataLakeStorageClientImpl dataLakeStorage;
private final String accountName;
private final String fileSystemName;
private final String pathName;
private final DataLakeServiceVersion serviceVersion;
protected PathResourceType pathResourceType;
protected final BlockBlobAsyncClient blockBlobAsyncClient;
/**
* Package-private constructor for use by {@link DataLakePathClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param fileSystemName The file system name.
* @param pathName The path name.
* @param blockBlobAsyncClient The underlying {@link BlobContainerAsyncClient}
*/
DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion,
    String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType,
    BlockBlobAsyncClient blockBlobAsyncClient) {
    // Build the generated dfs-layer client over the supplied pipeline, endpoint and service version.
    this.dataLakeStorage = new DataLakeStorageClientBuilder()
        .pipeline(pipeline)
        .url(url)
        .version(serviceVersion.getVersion())
        .build();
    this.serviceVersion = serviceVersion;
    this.accountName = accountName;
    this.fileSystemName = fileSystemName;
    // Decode then re-encode so the stored path is URL-encoded exactly once, whether or not the
    // caller passed an already-encoded name.
    this.pathName = Utility.urlEncode(Utility.urlDecode(pathName));
    this.pathResourceType = pathResourceType;
    // Blob-side twin of this path, used for operations shared with Blob Storage
    // (metadata, HTTP headers, properties, existence checks, SAS generation).
    this.blockBlobAsyncClient = blockBlobAsyncClient;
}
/**
* Converts the metadata into a string of format "key1=value1, key2=value2" and Base64 encodes the values.
*
* @param metadata The metadata.
*
* @return The metadata represented as a String.
*/
/**
 * Converts the metadata into a string of format "key1=value1,key2=value2", Base64-encoding each value.
 *
 * @param metadata The metadata map; a null or empty map yields an empty string.
 * @return The metadata represented as a String.
 * @throws IllegalArgumentException if any key or value is null or empty.
 */
static String buildMetadataString(Map<String, String> metadata) {
    if (CoreUtils.isNullOrEmpty(metadata)) {
        return "";
    }
    final StringBuilder joined = new StringBuilder();
    String separator = "";
    for (final Map.Entry<String, String> entry : metadata.entrySet()) {
        final String key = entry.getKey();
        final String value = entry.getValue();
        if (Objects.isNull(key) || key.isEmpty()) {
            throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, "
                + "empty, or whitespace.");
        } else if (Objects.isNull(value) || value.isEmpty()) {
            throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, "
                + "empty, or whitespace.");
        }
        /*
        The service has an internal base64 decode when metadata is copied from ADLS to Storage, so
        getMetadata works as normal; encoding here preserves the existing metadata behavior for customers.
        */
        joined.append(separator).append(key).append('=')
            .append(Base64.getEncoder().encodeToString(value.getBytes(StandardCharsets.UTF_8)));
        separator = ",";
    }
    return joined.toString();
}
/**
* Gets the URL of the object represented by this client on the Data Lake service.
*
* @return the URL.
*/
String getPathUrl() {
    return dataLakeStorage.getUrl();
}
/**
 * Gets the associated account name.
 *
 * @return Account name associated with this storage resource.
 */
public String getAccountName() {
    return accountName;
}
/**
 * Gets the name of the File System in which this object lives.
 *
 * @return The name of the File System.
 */
public String getFileSystemName() {
    return fileSystemName;
}
/**
 * Gets the path of this object, not including the name of the resource itself.
 *
 * @return The path of the object, or {@code null} if no path name was set.
 */
String getObjectPath() {
    // pathName is stored URL-encoded (see the constructor); decode before returning.
    // NOTE(review): this returns the decoded pathName in full — confirm the javadoc's
    // "not including the name of the resource itself" wording against callers.
    return (pathName == null) ? null : Utility.urlDecode(pathName);
}
/**
* Gets the name of this object, not including its full path.
*
* @return The name of the object.
*/
String getObjectName() {
    // getObjectPath() is null when no path name was set; propagate null rather than
    // throwing a NullPointerException from the split below.
    String objectPath = getObjectPath();
    if (objectPath == null) {
        return null;
    }
    // The object name is the final segment of the '/'-delimited path.
    String[] pathParts = objectPath.split("/");
    return pathParts[pathParts.length - 1];
}
/**
* Gets the {@link HttpPipeline} powering this client.
*
* @return The pipeline.
*/
public HttpPipeline getHttpPipeline() {
    // The pipeline is owned by the generated dfs-layer client.
    return dataLakeStorage.getHttpPipeline();
}
/**
 * Gets the service version the client is using.
 *
 * @return the {@link DataLakeServiceVersion} this client targets.
 */
public DataLakeServiceVersion getServiceVersion() {
    return serviceVersion;
}
/**
* Creates a resource. By default this method will not overwrite an existing path.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @return A reactive response containing information about the created resource.
*/
public Mono<PathInfo> create() {
    try {
        // Non-overwriting by default: create(false) adds the If-None-Match: * condition.
        return create(false);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Creates a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param overwrite Whether or not to overwrite, should data exist on the file.
*
* @return A reactive response containing information about the created resource.
*/
/**
 * Creates a resource, optionally overwriting any data that already exists at the path.
 *
 * @param overwrite Whether or not to overwrite, should data exist on the path.
 * @return A reactive response containing information about the created resource.
 */
public Mono<PathInfo> create(boolean overwrite) {
    try {
        final DataLakeRequestConditions conditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // If-None-Match: * makes the service reject the create when the path already exists.
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, null, null, conditions)
            .flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Creates a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param permissions POSIX access permissions for the resource owner, the resource owning group, and others.
* @param umask Restricts permissions of the resource to be created.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* PathItem}.
*/
public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers,
    Map<String, String> metadata, DataLakeRequestConditions requestConditions) {
    try {
        // Capture the reactor Context and forward to the internal overload, pinning this client's
        // resource type (file or directory).
        return withContext(context -> createWithResponse(permissions, umask, pathResourceType, headers, metadata,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Context context) {
    // Null conditions mean "unconditional".
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // Split the aggregate conditions into the lease and HTTP-conditional pieces the generated layer expects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // Metadata is flattened into the "key=base64(value),..." string form the REST layer expects.
    return this.dataLakeStorage.paths().createWithRestResponseAsync(resourceType, null, null, null, null,
        buildMetadataString(metadata), permissions, umask, null, null, headers, lac, mac, null, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
* Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
*
* @param recursive Whether or not to delete all paths beneath the directory.
* @param requestConditions {@link DataLakeRequestConditions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Mono} containing containing status code and HTTP headers
*/
Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions,
    Context context) {
    // Null conditions mean "unconditional delete".
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // Split the aggregate conditions into the lease and HTTP-conditional pieces the generated layer expects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // The delete response carries no body; keep the status/headers and drop the value.
    return this.dataLakeStorage.paths().deleteWithRestResponseAsync(recursive, null, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, null));
}
/**
* Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
* values must be preserved, they must be downloaded and included in the call to this method.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata
*
* <p>For more information, see the
* <a href="https:
*
* @param metadata Metadata to associate with the resource.
* @return A reactive response signalling completion.
*/
public Mono<Void> setMetadata(Map<String, String> metadata) {
    try {
        // Delegate to the WithResponse variant (no request conditions) and unwrap the Response body.
        return setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
* values must be preserved, they must be downloaded and included in the call to this method.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata
*
* <p>For more information, see the
* <a href="https:
*
* @param metadata Metadata to associate with the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata,
    DataLakeRequestConditions requestConditions) {
    try {
        // Performed through the underlying blob client; request conditions are translated to their
        // blob-model equivalents and blob exceptions are mapped back to Data Lake exceptions.
        return this.blockBlobAsyncClient.setMetadataWithResponse(metadata,
            Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @return A reactive response signalling completion.
*/
public Mono<Void> setHttpHeaders(PathHttpHeaders headers) {
    try {
        // Delegate to the WithResponse variant (no request conditions) and unwrap the Response body.
        return setHttpHeadersWithResponse(headers, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Changes a resources's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers,
    DataLakeRequestConditions requestConditions) {
    try {
        // Performed through the underlying blob client; headers and conditions are translated to their
        // blob-model equivalents and blob exceptions are mapped back to Data Lake exceptions.
        return this.blockBlobAsyncClient.setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers),
            Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Returns the resources's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<PathProperties> getProperties() {
    try {
        // Unconditional fetch; unwrap the Response body.
        return getPropertiesWithResponse(null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Returns the resource's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) {
    try {
        // Properties come from the blob representation of the path; the blob properties and any blob
        // exception are translated back into their Data Lake equivalents.
        return blockBlobAsyncClient.getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException)
            .map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue())));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.exists}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Boolean> exists() {
    try {
        // Delegate to the WithResponse variant and unwrap the Response body.
        return existsWithResponse().flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Response<Boolean>> existsWithResponse() {
    try {
        // Existence is checked against the blob representation of the path; blob exceptions are
        // mapped back to their Data Lake equivalents.
        return blockBlobAsyncClient.existsWithResponse()
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group,
    String owner) {
    try {
        // Delegate with no request conditions and unwrap the Response body.
        return setAccessControlListWithResponse(accessControlList, group, owner, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList,
    String group, String owner, DataLakeRequestConditions requestConditions) {
    try {
        // ACL-based variant of the shared setAccessControl path: the permissions argument is null.
        return withContext(context -> setAccessControlWithResponse(accessControlList, null, group, owner,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) {
    try {
        // Delegate with no request conditions and unwrap the Response body.
        return setPermissionsWithResponse(permissions, group, owner, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner,
    DataLakeRequestConditions requestConditions) {
    try {
        // Permissions-based variant of the shared setAccessControl path: the ACL argument is null.
        return withContext(context -> setAccessControlWithResponse(null, permissions, group, owner,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Shared implementation behind setAccessControlList and setPermissions: exactly one of
 * {@code accessControlList} or {@code permissions} is expected to be non-null.
 */
Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList,
    PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions,
    Context context) {
    // Treat absent request conditions as "unconditional".
    final DataLakeRequestConditions conditions =
        (requestConditions == null) ? new DataLakeRequestConditions() : requestConditions;
    // Split the aggregate conditions into the lease and HTTP-conditional pieces the generated layer expects.
    final LeaseAccessConditions leaseConditions =
        new LeaseAccessConditions().setLeaseId(conditions.getLeaseId());
    final ModifiedAccessConditions modifiedConditions = new ModifiedAccessConditions()
        .setIfMatch(conditions.getIfMatch())
        .setIfNoneMatch(conditions.getIfNoneMatch())
        .setIfModifiedSince(conditions.getIfModifiedSince())
        .setIfUnmodifiedSince(conditions.getIfUnmodifiedSince());
    // Serialize whichever representation was supplied; a null input maps to a null REST parameter.
    final String permissionsString = (permissions == null) ? null : permissions.toString();
    final String aclString = (accessControlList == null)
        ? null
        : PathAccessControlEntry.serializeList(accessControlList);
    return this.dataLakeStorage.paths()
        .setAccessControlWithRestResponseAsync(null, owner, group, permissionsString, aclString, null,
            leaseConditions, modifiedConditions, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource access control.
*/
public Mono<PathAccessControl> getAccessControl() {
    try {
        // Defaults: do not resolve user principal names, no request conditions.
        return getAccessControlWithResponse(false, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono, per azure-core convention.
        return monoError(logger, ex);
    }
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false,
* user identity values returned as Azure Active Directory Object IDs. Default value is false.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource access control.
*/
public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions) {
    try {
        // Capture the reactor Context and forward to the internal overload.
        return withContext(context -> getAccessControlWithResponse(userPrincipalNameReturned, requestConditions,
            context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions, Context context) {
    // Null conditions mean "unconditional".
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // Split the aggregate conditions into the lease and HTTP-conditional pieces the generated layer expects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // The GET_ACCESS_CONTROL action returns the ACL, symbolic permissions, group and owner in the
    // response headers; repackage those headers into a PathAccessControl.
    return this.dataLakeStorage.paths().getPropertiesWithRestResponseAsync(
        PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, new PathAccessControl(
            PathAccessControlEntry.parseList(response.getDeserializedHeaders().getAcl()),
            PathPermissions.parseSymbolic(response.getDeserializedHeaders().getPermissions()),
            response.getDeserializedHeaders().getGroup(), response.getDeserializedHeaders().getOwner())));
}
/**
* Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
*
* @param destinationFileSystem The file system of the destination within the account.
* @param destinationPath The path of the destination relative to the file system name
* @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
* @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* DataLakePathAsyncClient} used to interact with the path created.
*/
Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath,
    DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
    Context context) {
    // A null destination file system means "rename within the current file system".
    if (destinationFileSystem == null) {
        destinationFileSystem = getFileSystemName();
    }
    // Null conditions on either side mean "unconditional".
    destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions()
        : destinationRequestConditions;
    sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions()
        : sourceRequestConditions;
    // Source-side conditions travel as Source* headers; the source lease id is passed separately below.
    SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions()
        .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince())
        .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince())
        .setSourceIfMatch(sourceRequestConditions.getIfMatch())
        .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch());
    LeaseAccessConditions destLac = new LeaseAccessConditions()
        .setLeaseId(destinationRequestConditions.getLeaseId());
    ModifiedAccessConditions destMac = new ModifiedAccessConditions()
        .setIfMatch(destinationRequestConditions.getIfMatch())
        .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch())
        .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince());
    // Client addressing the destination; the rename request is issued against it and it is also the value
    // returned to the caller on success.
    DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath);
    // The service identifies the rename source as "/<filesystem>/<path>" (pathName is stored URL-encoded).
    String renameSource = "/" + this.fileSystemName + "/" + pathName;
    // A "create" with a rename source and PathRenameMode.LEGACY performs the rename on the service side.
    return dataLakePathAsyncClient.dataLakeStorage.paths().createWithRestResponseAsync(null /* pathResourceType */,
        null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(),
        null /* metadata */, null /* permissions */, null /* umask */, null /* request id */, null /* timeout */,
        null /* pathHttpHeaders */, destLac, destMac, sourceConditions, context)
        .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient));
}
/**
* Takes in a destination and creates a DataLakePathAsyncClient with a new path
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return A DataLakePathAsyncClient
*/
/**
* Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return An updated SpecializedBlobClientBuilder
*/
/**
 * Takes in a destination and prepares a {@link SpecializedBlobClientBuilder} whose endpoint points at the
 * blob-service twin of the destination path.
 *
 * @param destinationFileSystem The destination file system.
 * @param destinationPath The destination path.
 * @return An updated SpecializedBlobClientBuilder.
 */
SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String destinationPath) {
    // Derive the blob endpoint from the current (dfs) URL, then retarget it at the destination
    // container and blob name.
    String blobEndpoint = DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(), "blob", "dfs");
    BlobUrlParts destinationParts = BlobUrlParts.parse(blobEndpoint)
        .setContainerName(destinationFileSystem)
        .setBlobName(destinationPath);
    return new SpecializedBlobClientBuilder()
        .endpoint(destinationParts.toUrl().toString())
        .pipeline(getHttpPipeline())
        .serviceVersion(BlobServiceVersion.getLatest());
}
/**
 * Gets the underlying {@link BlockBlobAsyncClient} backing this path client.
 *
 * @return the wrapped block blob client.
 */
BlockBlobAsyncClient getBlockBlobAsyncClient() {
    return this.blockBlobAsyncClient;
}
/**
* Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}.
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS.
* </p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
* @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values.
* @see DataLakeServiceAsyncClient
* to get a user delegation key.
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues,
    UserDelegationKey userDelegationKey) {
    // SAS generation is handled by the wrapped blob client; the Data Lake SAS values and delegation key
    // are translated to their blob-model counterparts first.
    return blockBlobAsyncClient.generateUserDelegationSas(
        Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues),
        Transforms.toBlobUserDelegationKey(userDelegationKey));
}
/**
* Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}
* Note : The client must be authenticated via {@link StorageSharedKeyCredential}
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) {
    // The service SAS is generated by the wrapped blob client from the translated SAS values.
    return blockBlobAsyncClient.generateSas(Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues));
}
} | class DataLakePathAsyncClient {
private final ClientLogger logger = new ClientLogger(DataLakePathAsyncClient.class);
protected final DataLakeStorageClientImpl dataLakeStorage;
private final String accountName;
private final String fileSystemName;
private final String pathName;
private final DataLakeServiceVersion serviceVersion;
protected PathResourceType pathResourceType;
protected final BlockBlobAsyncClient blockBlobAsyncClient;
/**
* Package-private constructor for use by {@link DataLakePathClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param fileSystemName The file system name.
* @param pathName The path name.
* @param blockBlobAsyncClient The underlying {@link BlobContainerAsyncClient}
*/
DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion,
String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType,
BlockBlobAsyncClient blockBlobAsyncClient) {
this.dataLakeStorage = new DataLakeStorageClientBuilder()
.pipeline(pipeline)
.url(url)
.version(serviceVersion.getVersion())
.build();
this.serviceVersion = serviceVersion;
this.accountName = accountName;
this.fileSystemName = fileSystemName;
this.pathName = Utility.urlEncode(Utility.urlDecode(pathName));
this.pathResourceType = pathResourceType;
this.blockBlobAsyncClient = blockBlobAsyncClient;
}
/**
* Converts the metadata into a string of format "key1=value1, key2=value2" and Base64 encodes the values.
*
* @param metadata The metadata.
*
* @return The metadata represented as a String.
*/
/**
 * Serializes metadata into the header form "key1=value1,key2=value2" where each value is Base64
 * encoded. The service Base64-decodes metadata when it is surfaced through the blob endpoint, so
 * getMetadata round-trips the original values.
 *
 * @param metadata The metadata map; may be null or empty.
 * @return The serialized metadata, or an empty string when there is nothing to serialize.
 */
static String buildMetadataString(Map<String, String> metadata) {
    StringBuilder serialized = new StringBuilder();
    if (!CoreUtils.isNullOrEmpty(metadata)) {
        String separator = "";
        for (Map.Entry<String, String> entry : metadata.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (key == null || key.isEmpty()) {
                throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, "
                    + "empty, or whitespace.");
            }
            if (value == null || value.isEmpty()) {
                throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, "
                    + "empty, or whitespace.");
            }
            serialized.append(separator)
                .append(key)
                .append('=')
                .append(Base64.getEncoder().encodeToString(value.getBytes(StandardCharsets.UTF_8)));
            separator = ",";
        }
    }
    return serialized.toString();
}
/**
* Gets the URL of the object represented by this client on the Data Lake service.
*
* @return the URL.
*/
String getPathUrl() {
    // The generated client was constructed with the dfs endpoint URL for this path.
    return dataLakeStorage.getUrl();
}
/**
* Gets the associated account name.
*
* @return Account name associated with this storage resource.
*/
public String getAccountName() {
    return accountName;
}
/**
* Gets the name of the File System in which this object lives.
*
* @return The name of the File System.
*/
public String getFileSystemName() {
    return fileSystemName;
}
/**
* Gets the path of this object, not including the name of the resource itself.
*
* @return The path of the object.
*/
String getObjectPath() {
    // pathName is stored URL-encoded; return the decoded, human-readable path.
    return (pathName == null) ? null : Utility.urlDecode(pathName);
}
/**
* Gets the name of this object, not including its full path.
*
* @return The name of the object.
*/
String getObjectName() {
    // The object name is the last '/'-separated segment of the decoded path.
    String[] pathParts = getObjectPath().split("/");
    return pathParts[pathParts.length - 1];
}
/**
* Gets the {@link HttpPipeline} powering this client.
*
* @return The pipeline.
*/
public HttpPipeline getHttpPipeline() {
    return dataLakeStorage.getHttpPipeline();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public DataLakeServiceVersion getServiceVersion() {
    return serviceVersion;
}
/**
* Creates a resource. By default this method will not overwrite an existing path.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
*
* <p>For more information see the
 * <a href="https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create">Azure
 * Docs</a></p>
*
* @return A reactive response containing information about the created resource.
*/
public Mono<PathInfo> create() {
    try {
        // Delegate with overwrite disabled; the create fails if the path already exists.
        return create(false);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono instead of throwing.
        return monoError(logger, ex);
    }
}
/**
* Creates a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param overwrite Whether or not to overwrite, should data exist on the file.
*
* @return A reactive response containing information about the created resource.
*/
public Mono<PathInfo> create(boolean overwrite) {
    try {
        // When overwrite is disallowed, send an If-None-Match: * condition so the service
        // rejects the create if a path already exists at this location.
        DataLakeRequestConditions conditions = new DataLakeRequestConditions();
        if (!overwrite) {
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, null, null, conditions)
            .flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Creates a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @param permissions POSIX access permissions for the resource owner, the resource owning group, and others.
* @param umask Restricts permissions of the resource to be created.
* @param headers {@link PathHttpHeaders}
* @param metadata Metadata to associate with the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* PathItem}.
*/
public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers,
    Map<String, String> metadata, DataLakeRequestConditions requestConditions) {
    try {
        // Capture the subscriber context and forward to the package-private overload with this
        // client's resource type (file or directory).
        return withContext(context -> createWithResponse(permissions, umask, pathResourceType, headers, metadata,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Package-private create implementation shared by file and directory clients.
 *
 * @param permissions POSIX access permissions for owner, owning group, and others.
 * @param umask Restricts permissions of the resource to be created.
 * @param resourceType Whether to create a file or a directory.
 * @param headers {@link PathHttpHeaders}
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}; null means unconditional.
 * @param context Additional context passed through the Http pipeline.
 * @return A reactive response containing information about the created resource.
 */
Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Context context) {
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // Split the unified request conditions into the lease and modified-since objects the
    // generated layer expects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    return this.dataLakeStorage.paths().createWithRestResponseAsync(resourceType, null, null, null, null,
        buildMetadataString(metadata), permissions, umask, null, null, headers, lac, mac, null, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
* Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
*
* @param recursive Whether or not to delete all paths beneath the directory.
* @param requestConditions {@link DataLakeRequestConditions}
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Mono} containing containing status code and HTTP headers
*/
Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions,
    Context context) {
    // Null conditions behave as "unconditional".
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // The delete response carries no body; strip it down to status code and headers.
    return this.dataLakeStorage.paths().deleteWithRestResponseAsync(recursive, null, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, null));
}
/**
* Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
* values must be preserved, they must be downloaded and included in the call to this method.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata
*
* <p>For more information, see the
* <a href="https:
*
* @param metadata Metadata to associate with the resource.
* @return A reactive response signalling completion.
*/
public Mono<Void> setMetadata(Map<String, String> metadata) {
    try {
        // Delegate to the conditional overload with no request conditions.
        return setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
* values must be preserved, they must be downloaded and included in the call to this method.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata
*
* <p>For more information, see the
* <a href="https:
*
* @param metadata Metadata to associate with the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata,
    DataLakeRequestConditions requestConditions) {
    try {
        // Metadata is shared with the blob endpoint, so delegate to the block blob client and
        // map any BlobStorageException back to its Data Lake equivalent.
        return this.blockBlobAsyncClient.setMetadataWithResponse(metadata,
            Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @return A reactive response signalling completion.
*/
public Mono<Void> setHttpHeaders(PathHttpHeaders headers) {
    try {
        // Delegate to the conditional overload with no request conditions.
        return setHttpHeadersWithResponse(headers, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes a resources's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers,
    DataLakeRequestConditions requestConditions) {
    try {
        // HTTP headers are shared with the blob endpoint, so delegate to the block blob client
        // and map any BlobStorageException back to its Data Lake equivalent.
        return this.blockBlobAsyncClient.setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers),
            Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Returns the resources's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<PathProperties> getProperties() {
    try {
        // Delegate to the conditional overload with no request conditions.
        return getPropertiesWithResponse(null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Returns the resource's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) {
    try {
        // Properties are served from the blob endpoint; translate the blob response into the
        // Data Lake PathProperties model.
        return blockBlobAsyncClient.getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException)
            .map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue())));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.exists}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Boolean> exists() {
    try {
        return existsWithResponse().flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Response<Boolean>> existsWithResponse() {
    try {
        // Existence is checked against the blob endpoint; map blob exceptions back to Data Lake.
        return blockBlobAsyncClient.existsWithResponse()
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group,
    String owner) {
    try {
        // Delegate to the conditional overload with no request conditions.
        return setAccessControlListWithResponse(accessControlList, group, owner, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList,
    String group, String owner, DataLakeRequestConditions requestConditions) {
    try {
        // ACL variant: pass the entry list and a null permissions object to the shared helper.
        return withContext(context -> setAccessControlWithResponse(accessControlList, null, group, owner,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) {
    try {
        // Delegate to the conditional overload with no request conditions.
        return setPermissionsWithResponse(permissions, group, owner, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner,
    DataLakeRequestConditions requestConditions) {
    try {
        // Permissions variant: pass a null ACL list and the permissions object to the shared helper.
        return withContext(context -> setAccessControlWithResponse(null, permissions, group, owner,
            requestConditions, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Shared implementation for setAccessControlList / setPermissions. Exactly one of
 * {@code accessControlList} or {@code permissions} is expected to be non-null; whichever is
 * present is serialized and sent.
 */
Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList,
    PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions,
    Context context) {
    // Absent conditions behave as "unconditional".
    if (requestConditions == null) {
        requestConditions = new DataLakeRequestConditions();
    }
    LeaseAccessConditions leaseConditions = new LeaseAccessConditions()
        .setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions modifiedConditions = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    String permissionsString = permissions == null ? null : permissions.toString();
    String accessControlListString = accessControlList == null
        ? null
        : PathAccessControlEntry.serializeList(accessControlList);
    return this.dataLakeStorage.paths().setAccessControlWithRestResponseAsync(null, owner, group, permissionsString,
        accessControlListString, null, leaseConditions, modifiedConditions, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource access control.
*/
public Mono<PathAccessControl> getAccessControl() {
    try {
        // Defaults: return AAD object IDs (not UPNs) and apply no request conditions.
        return getAccessControlWithResponse(false, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false,
* user identity values returned as Azure Active Directory Object IDs. Default value is false.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource access control.
*/
public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions) {
    try {
        // Capture the subscriber context and forward to the package-private implementation.
        return withContext(context -> getAccessControlWithResponse(userPrincipalNameReturned, requestConditions,
            context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
 * Package-private implementation: issues a getProperties call with the GET_ACCESS_CONTROL action
 * and parses the ACL, permission, group, and owner headers into a {@link PathAccessControl}.
 */
Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions, Context context) {
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    return this.dataLakeStorage.paths().getPropertiesWithRestResponseAsync(
        PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, new PathAccessControl(
            PathAccessControlEntry.parseList(response.getDeserializedHeaders().getAcl()),
            PathPermissions.parseSymbolic(response.getDeserializedHeaders().getPermissions()),
            response.getDeserializedHeaders().getGroup(), response.getDeserializedHeaders().getOwner())));
}
/**
* Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
*
* @param destinationFileSystem The file system of the destination within the account.
* {@code null} for the current file system.
* @param destinationPath The path of the destination relative to the file system name
* @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
* @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* DataLakePathAsyncClient} used to interact with the path created.
*/
Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath,
    DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
    Context context) {
    // Null condition sets behave as "unconditional".
    destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions()
        : destinationRequestConditions;
    sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions()
        : sourceRequestConditions;
    // Conditions evaluated against the source path being renamed.
    SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions()
        .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince())
        .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince())
        .setSourceIfMatch(sourceRequestConditions.getIfMatch())
        .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch());
    // Conditions evaluated against the destination path.
    LeaseAccessConditions destLac = new LeaseAccessConditions()
        .setLeaseId(destinationRequestConditions.getLeaseId());
    ModifiedAccessConditions destMac = new ModifiedAccessConditions()
        .setIfMatch(destinationRequestConditions.getIfMatch())
        .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch())
        .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince());
    DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath);
    // Rename is expressed as a "create at destination" whose rename-source header points at this
    // path: "/<fileSystem>/<path>".
    String renameSource = "/" + this.fileSystemName + "/" + pathName;
    return dataLakePathAsyncClient.dataLakeStorage.paths().createWithRestResponseAsync(null /* pathResourceType */,
        null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(),
        null /* metadata */, null /* permissions */, null /* umask */, null /* request id */, null /* timeout */,
        null /* pathHttpHeaders */, destLac, destMac, sourceConditions, context)
        .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient));
}
/**
* Takes in a destination and creates a DataLakePathAsyncClient with a new path
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return A DataLakePathAsyncClient
*/
/**
* Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return An updated SpecializedBlobClientBuilder
*/
SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String destinationPath) {
    // A null destination file system means "the current file system".
    if (destinationFileSystem == null) {
        destinationFileSystem = getFileSystemName();
    }
    // Swap the dfs endpoint for its blob-endpoint twin, then re-point it at the destination
    // container and blob name.
    String newBlobEndpoint = BlobUrlParts.parse(DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(),
        "blob", "dfs")).setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString();
    return new SpecializedBlobClientBuilder()
        .pipeline(getHttpPipeline())
        .endpoint(newBlobEndpoint)
        .serviceVersion(BlobServiceVersion.getLatest());
}
/**
 * Gets the underlying {@link BlockBlobAsyncClient} backing this path client.
 *
 * @return The block blob client.
 */
BlockBlobAsyncClient getBlockBlobAsyncClient() {
    return this.blockBlobAsyncClient;
}
/**
* Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}.
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS.
* </p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
* @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values.
* @see DataLakeServiceAsyncClient
* to get a user delegation key.
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues,
    UserDelegationKey userDelegationKey) {
    // Convert both the SAS values and the delegation key to their blob equivalents and delegate
    // signing to the underlying block blob client.
    return blockBlobAsyncClient.generateUserDelegationSas(
        Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues),
        Transforms.toBlobUserDelegationKey(userDelegationKey));
}
/**
* Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}
* Note : The client must be authenticated via {@link StorageSharedKeyCredential}
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) {
    // Convert the Data Lake SAS values to their blob equivalents and delegate signing to the
    // underlying block blob client; the blob and dfs endpoints share the same SAS scheme.
    return blockBlobAsyncClient.generateSas(Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues));
}
} |
Sure, I can change that | DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) {
if (CoreUtils.isNullOrEmpty(destinationFileSystem)) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"'destinationFileSystem' can not be set to null"));
}
if (CoreUtils.isNullOrEmpty(destinationPath)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null"));
}
String newDfsEndpoint = BlobUrlParts.parse(getPathUrl())
.setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString();
return new DataLakePathAsyncClient(getHttpPipeline(), newDfsEndpoint, serviceVersion, accountName,
destinationFileSystem, destinationPath, pathResourceType,
prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient());
} | throw logger.logExceptionAsError(new IllegalArgumentException( | DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) {
if (destinationFileSystem == null) {
destinationFileSystem = getFileSystemName();
}
if (CoreUtils.isNullOrEmpty(destinationPath)) {
throw logger.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null"));
}
String newDfsEndpoint = BlobUrlParts.parse(getPathUrl())
.setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString();
return new DataLakePathAsyncClient(getHttpPipeline(), newDfsEndpoint, serviceVersion, accountName,
destinationFileSystem, destinationPath, pathResourceType,
prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient());
} | class DataLakePathAsyncClient {
private final ClientLogger logger = new ClientLogger(DataLakePathAsyncClient.class);
protected final DataLakeStorageClientImpl dataLakeStorage;
private final String accountName;
private final String fileSystemName;
private final String pathName;
private final DataLakeServiceVersion serviceVersion;
protected PathResourceType pathResourceType;
protected final BlockBlobAsyncClient blockBlobAsyncClient;
/**
* Package-private constructor for use by {@link DataLakePathClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param fileSystemName The file system name.
* @param pathName The path name.
* @param blockBlobAsyncClient The underlying {@link BlobContainerAsyncClient}
*/
DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion,
String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType,
BlockBlobAsyncClient blockBlobAsyncClient) {
this.dataLakeStorage = new DataLakeStorageClientBuilder()
.pipeline(pipeline)
.url(url)
.version(serviceVersion.getVersion())
.build();
this.serviceVersion = serviceVersion;
this.accountName = accountName;
this.fileSystemName = fileSystemName;
this.pathName = Utility.urlEncode(Utility.urlDecode(pathName));
this.pathResourceType = pathResourceType;
this.blockBlobAsyncClient = blockBlobAsyncClient;
}
/**
* Converts the metadata into a string of format "key1=value1, key2=value2" and Base64 encodes the values.
*
* @param metadata The metadata.
*
* @return The metadata represented as a String.
*/
static String buildMetadataString(Map<String, String> metadata) {
StringBuilder sb = new StringBuilder();
if (!CoreUtils.isNullOrEmpty(metadata)) {
for (final Map.Entry<String, String> entry : metadata.entrySet()) {
if (Objects.isNull(entry.getKey()) || entry.getKey().isEmpty()) {
throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, "
+ "empty, or whitespace.");
} else if (Objects.isNull(entry.getValue()) || entry.getValue().isEmpty()) {
throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, "
+ "empty, or whitespace.");
}
/*
The service has an internal base64 decode when metadata is copied from ADLS to Storage, so getMetadata
will work as normal. Doing this encoding for the customers preserves the existing behavior of
metadata.
*/
sb.append(entry.getKey()).append('=')
.append(new String(Base64.getEncoder().encode(entry.getValue().getBytes(StandardCharsets.UTF_8)),
StandardCharsets.UTF_8)).append(',');
}
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
/**
* Gets the URL of the object represented by this client on the Data Lake service.
*
* @return the URL.
*/
String getPathUrl() {
return dataLakeStorage.getUrl();
}
/**
* Gets the associated account name.
*
* @return Account name associated with this storage resource.
*/
public String getAccountName() {
return accountName;
}
/**
* Gets the name of the File System in which this object lives.
*
* @return The name of the File System.
*/
public String getFileSystemName() {
return fileSystemName;
}
/**
* Gets the path of this object, not including the name of the resource itself.
*
* @return The path of the object.
*/
String getObjectPath() {
return (pathName == null) ? null : Utility.urlDecode(pathName);
}
/**
* Gets the name of this object, not including its full path.
*
* @return The name of the object.
*/
String getObjectName() {
String[] pathParts = getObjectPath().split("/");
return pathParts[pathParts.length - 1];
}
/**
* Gets the {@link HttpPipeline} powering this client.
*
* @return The pipeline.
*/
public HttpPipeline getHttpPipeline() {
return dataLakeStorage.getHttpPipeline();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public DataLakeServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Creates a resource. By default this method will not overwrite an existing path.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
*
* <p>For more information see the
* <a href="https:
* Docs</a></p>
*
* @return A reactive response containing information about the created resource.
*/
public Mono<PathInfo> create() {
try {
return create(false);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
 * Creates a resource.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
 *
 * <p>For more information see the Azure Docs.</p>
 *
 * @param overwrite Whether or not to overwrite, should data exist on the file.
 * @return A reactive response containing information about the created resource.
 */
public Mono<PathInfo> create(boolean overwrite) {
    try {
        final DataLakeRequestConditions conditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // An if-none-match of "*" makes the service reject creation when the path already exists.
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, null, null, conditions).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a resource.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse}
 *
 * <p>For more information see the Azure Docs.</p>
 *
 * @param permissions POSIX access permissions for the resource owner, the resource owning group, and others.
 * @param umask Restricts permissions of the resource to be created.
 * @param headers {@link PathHttpHeaders}
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A {@link Mono} containing a {@link Response} whose value contains a {@link PathInfo}.
 */
public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers,
    Map<String, String> metadata, DataLakeRequestConditions requestConditions) {
    try {
        // The resource type (file vs. directory) is fixed by this client instance.
        return withContext(ctx -> createWithResponse(permissions, umask, pathResourceType, headers, metadata,
            requestConditions, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a resource on the service using the explicit {@code resourceType}.
 *
 * @param permissions POSIX access permissions for the resource owner, owning group, and others.
 * @param umask Restricts permissions of the resource to be created.
 * @param resourceType Whether to create a file or a directory.
 * @param headers {@link PathHttpHeaders}
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context passed through the HTTP pipeline during the service call.
 * @return A {@link Mono} containing a {@link Response} whose value is a {@link PathInfo}.
 */
Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // The generated layer takes lease and modified conditions as separate objects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // Unused optional parameters of the generated method (continuation, rename mode,
    // source conditions, etc.) are passed as null.
    return this.dataLakeStorage.paths().createWithRestResponseAsync(resourceType, null, null, null, null,
        buildMetadataString(metadata), permissions, umask, null, null, headers, lac, mac, null, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
 * Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
 *
 * @param recursive Whether or not to delete all paths beneath the directory; only meaningful for directories and
 * may be null.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Mono} containing status code and HTTP headers (the response value is always null).
 */
Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions,
    Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // The generated layer takes lease and modified conditions as separate objects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    return this.dataLakeStorage.paths().deleteWithRestResponseAsync(recursive, null, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, null));
}
/**
 * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
 * values must be preserved, they must be downloaded and included in the call to this method.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param metadata Metadata to associate with the resource.
 * @return A reactive response signalling completion.
 */
public Mono<Void> setMetadata(Map<String, String> metadata) {
    try {
        return this.setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
 * values must be preserved, they must be downloaded and included in the call to this method.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response signalling completion.
 */
public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata,
    DataLakeRequestConditions requestConditions) {
    try {
        // Metadata lives on the blob endpoint; delegate and translate any blob error
        // back into its Data Lake equivalent.
        return this.blockBlobAsyncClient
            .setMetadataWithResponse(metadata, Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
 * In order to preserve existing values, they must be passed alongside the header being changed.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param headers {@link PathHttpHeaders}
 * @return A reactive response signalling completion.
 */
public Mono<Void> setHttpHeaders(PathHttpHeaders headers) {
    try {
        return this.setHttpHeadersWithResponse(headers, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
 * In order to preserve existing values, they must be passed alongside the header being changed.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param headers {@link PathHttpHeaders}
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response signalling completion.
 */
public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers,
    DataLakeRequestConditions requestConditions) {
    try {
        // Headers live on the blob endpoint; translate both the headers and the
        // conditions, then map blob errors back into Data Lake errors.
        return this.blockBlobAsyncClient
            .setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers),
                Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Returns the resource's metadata and properties.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @return A reactive response containing the resource's properties and metadata.
 */
public Mono<PathProperties> getProperties() {
    try {
        return this.getPropertiesWithResponse(null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Returns the resource's metadata and properties.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response containing the resource's properties and metadata.
 */
public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) {
    try {
        // Properties are served by the blob endpoint; translate the blob-shaped
        // response (and any error) back into Data Lake types.
        return blockBlobAsyncClient
            .getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException)
            .map(blobResponse ->
                new SimpleResponse<>(blobResponse, Transforms.toPathProperties(blobResponse.getValue())));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Determines if the path this client represents exists in the cloud.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.exists}
 *
 * @return true if the path exists, false if it doesn't
 */
public Mono<Boolean> exists() {
    try {
        return this.existsWithResponse().flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Determines if the path this client represents exists in the cloud.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse}
 *
 * @return true if the path exists, false if it doesn't
 */
public Mono<Response<Boolean>> existsWithResponse() {
    try {
        // Existence is checked through the blob endpoint; translate blob errors back.
        return blockBlobAsyncClient
            .existsWithResponse()
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes the access control list, group and/or owner for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param accessControlList A list of {@link PathAccessControlEntry} objects.
 * @param group The group of the resource.
 * @param owner The owner of the resource.
 * @return A reactive response containing the resource info.
 */
public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group,
    String owner) {
    try {
        return this.setAccessControlListWithResponse(accessControlList, group, owner, null)
            .flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes the access control list, group and/or owner for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param accessControlList A list of {@link PathAccessControlEntry} objects.
 * @param group The group of the resource.
 * @param owner The owner of the resource.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response containing the resource info.
 */
public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList,
    String group, String owner, DataLakeRequestConditions requestConditions) {
    try {
        // Shared implementation: an explicit ACL is supplied, so permissions are null.
        return withContext(ctx -> setAccessControlWithResponse(accessControlList, null, group, owner,
            requestConditions, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes the permissions, group and/or owner for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param permissions {@link PathPermissions}
 * @param group The group of the resource.
 * @param owner The owner of the resource.
 * @return A reactive response containing the resource info.
 */
public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) {
    try {
        return this.setPermissionsWithResponse(permissions, group, owner, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Changes the permissions, group and/or owner for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param permissions {@link PathPermissions}
 * @param group The group of the resource.
 * @param owner The owner of the resource.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response containing the resource info.
 */
public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner,
    DataLakeRequestConditions requestConditions) {
    try {
        // Shared implementation: symbolic permissions are supplied, so the ACL is null.
        return withContext(ctx -> setAccessControlWithResponse(null, permissions, group, owner,
            requestConditions, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Shared implementation behind {@code setAccessControlListWithResponse} and
 * {@code setPermissionsWithResponse}. Callers supply either an explicit ACL or symbolic
 * permissions (the other argument is null).
 *
 * @param accessControlList Explicit ACL entries, or null when {@code permissions} is used.
 * @param permissions Symbolic permissions, or null when {@code accessControlList} is used.
 * @param group The group of the resource; may be null to leave unchanged.
 * @param owner The owner of the resource; may be null to leave unchanged.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context passed through the HTTP pipeline during the service call.
 * @return A {@link Mono} containing a {@link Response} whose value is a {@link PathInfo}.
 */
Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList,
    PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions,
    Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // Serialize whichever access representation the caller provided; the generated
    // layer accepts both as strings and null for the one not in use.
    String permissionsString = permissions == null ? null : permissions.toString();
    String accessControlListString =
        accessControlList == null
            ? null
            : PathAccessControlEntry.serializeList(accessControlList);
    return this.dataLakeStorage.paths().setAccessControlWithRestResponseAsync(null, owner, group, permissionsString,
        accessControlListString, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
 * Returns the access control for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @return A reactive response containing the resource access control.
 */
public Mono<PathAccessControl> getAccessControl() {
    try {
        // Default behavior: identities are returned as AAD object IDs, no conditions.
        return this.getAccessControlWithResponse(false, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Returns the access control for a resource.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false,
 * user identity values returned as Azure Active Directory Object IDs. Default value is false.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A reactive response containing the resource access control.
 */
public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions) {
    try {
        return withContext(ctx -> getAccessControlWithResponse(userPrincipalNameReturned, requestConditions,
            ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Service-call implementation for {@code getAccessControlWithResponse}.
 *
 * @param userPrincipalNameReturned When true, identities are returned as User Principal Names; when false, as
 * Azure Active Directory Object IDs.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context passed through the HTTP pipeline during the service call.
 * @return A {@link Mono} containing a {@link Response} whose value is a {@link PathAccessControl}.
 */
Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions, Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // The ACL is fetched via the "get properties" generated operation with the
    // GET_ACCESS_CONTROL action; the interesting data comes back in response headers.
    return this.dataLakeStorage.paths().getPropertiesWithRestResponseAsync(
        PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, new PathAccessControl(
            PathAccessControlEntry.parseList(response.getDeserializedHeaders().getAcl()),
            PathPermissions.parseSymbolic(response.getDeserializedHeaders().getPermissions()),
            response.getDeserializedHeaders().getGroup(), response.getDeserializedHeaders().getOwner())));
}
/**
 * Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
 *
 * @param destinationFileSystem The file system of the destination within the account; null means "same file
 * system as the source".
 * @param destinationPath The path of the destination relative to the file system name
 * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
 * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Mono} containing a {@link Response} whose value is a {@link DataLakePathAsyncClient} used to
 * interact with the path created.
 */
Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath,
    DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
    Context context) {
    if (destinationFileSystem == null) {
        destinationFileSystem = getFileSystemName();
    }
    destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions()
        : destinationRequestConditions;
    sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions()
        : sourceRequestConditions;
    // Source conditions ride in their own generated type; the source lease id is
    // passed separately in the create call below.
    SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions()
        .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince())
        .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince())
        .setSourceIfMatch(sourceRequestConditions.getIfMatch())
        .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch());
    LeaseAccessConditions destLac = new LeaseAccessConditions()
        .setLeaseId(destinationRequestConditions.getLeaseId());
    ModifiedAccessConditions destMac = new ModifiedAccessConditions()
        .setIfMatch(destinationRequestConditions.getIfMatch())
        .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch())
        .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince());
    // Rename is issued as a "create with rename source" (LEGACY mode) against the
    // destination, so build a client for the destination and send through it.
    DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath);
    // The service expects the source as "/<filesystem>/<path>".
    String renameSource = "/" + this.fileSystemName + "/" + pathName;
    return dataLakePathAsyncClient.dataLakeStorage.paths().createWithRestResponseAsync(null /* pathResourceType */,
        null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(),
        null /* metadata */, null /* permissions */, null /* umask */, null /* request id */, null /* timeout */,
        null /* pathHttpHeaders */, destLac, destMac, sourceConditions, context)
        .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient));
}
/**
* Takes in a destination and creates a DataLakePathAsyncClient with a new path
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return A DataLakePathAsyncClient
*/
/**
 * Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name
 * @param destinationFileSystem The destination file system
 * @param destinationPath The destination path
 * @return An updated SpecializedBlobClientBuilder
 */
SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String destinationPath) {
    // Swap the dfs endpoint for its blob twin, then substitute the destination names.
    String blobEndpoint = DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(), "blob", "dfs");
    String newBlobEndpoint = BlobUrlParts.parse(blobEndpoint)
        .setBlobName(destinationPath)
        .setContainerName(destinationFileSystem)
        .toUrl()
        .toString();
    return new SpecializedBlobClientBuilder()
        .pipeline(getHttpPipeline())
        .endpoint(newBlobEndpoint)
        .serviceVersion(BlobServiceVersion.getLatest());
}
/**
 * Gets the wrapped {@link BlockBlobAsyncClient} that targets this path on the blob endpoint.
 *
 * @return The underlying block blob client.
 */
BlockBlobAsyncClient getBlockBlobAsyncClient() {
    return this.blockBlobAsyncClient;
}
/**
 * Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}.
 * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS.
 * </p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas}
 *
 * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
 * @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values.
 * @see DataLakeServiceAsyncClient
 * to get a user delegation key.
 *
 * @return A {@code String} representing all SAS query parameters.
 */
public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues,
    UserDelegationKey userDelegationKey) {
    // Translate the Data Lake SAS values and delegation key into their blob
    // equivalents, then delegate signing to the wrapped blob client.
    return this.blockBlobAsyncClient.generateUserDelegationSas(
        Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues),
        Transforms.toBlobUserDelegationKey(userDelegationKey));
}
/**
 * Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}
 * Note : The client must be authenticated via {@link StorageSharedKeyCredential}
 * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas}
 *
 * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
 *
 * @return A {@code String} representing all SAS query parameters.
 */
public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) {
    // Signing is delegated to the wrapped blob client after translating the values.
    String sasQueryParameters = this.blockBlobAsyncClient.generateSas(
        Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues));
    return sasQueryParameters;
}
} | class DataLakePathAsyncClient {
private final ClientLogger logger = new ClientLogger(DataLakePathAsyncClient.class);
// Generated low-level client that talks to the Data Lake (dfs) endpoint.
protected final DataLakeStorageClientImpl dataLakeStorage;
private final String accountName;
private final String fileSystemName;
// Stored URL-encoded (normalized in the constructor); decode via getObjectPath().
private final String pathName;
private final DataLakeServiceVersion serviceVersion;
// File or directory; non-final so subclasses can specialize it.
protected PathResourceType pathResourceType;
// Equivalent client against the blob endpoint; several operations delegate to it.
protected final BlockBlobAsyncClient blockBlobAsyncClient;
/**
 * Package-private constructor for use by {@link DataLakePathClientBuilder}.
 *
 * @param pipeline The pipeline used to send and receive service requests.
 * @param url The endpoint where to send service requests.
 * @param serviceVersion The version of the service to receive requests.
 * @param accountName The storage account name.
 * @param fileSystemName The file system name.
 * @param pathName The path name.
 * @param pathResourceType Whether this path is a file or a directory.
 * @param blockBlobAsyncClient The underlying {@link BlockBlobAsyncClient} used for blob-endpoint operations.
 */
DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion,
    String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType,
    BlockBlobAsyncClient blockBlobAsyncClient) {
    this.dataLakeStorage = new DataLakeStorageClientBuilder()
        .pipeline(pipeline)
        .url(url)
        .version(serviceVersion.getVersion())
        .build();
    this.serviceVersion = serviceVersion;
    this.accountName = accountName;
    this.fileSystemName = fileSystemName;
    // Decode-then-encode normalizes the path so it is stored exactly once URL-encoded,
    // regardless of whether the caller passed an encoded or a raw path.
    this.pathName = Utility.urlEncode(Utility.urlDecode(pathName));
    this.pathResourceType = pathResourceType;
    this.blockBlobAsyncClient = blockBlobAsyncClient;
}
/**
 * Converts the metadata into a string of format "key1=value1,key2=value2" where each value is Base64 encoded.
 *
 * @param metadata The metadata.
 * @return The metadata represented as a String; empty when {@code metadata} is null or empty.
 * @throws IllegalArgumentException If any key or value is null, empty, or whitespace-only.
 */
static String buildMetadataString(Map<String, String> metadata) {
    StringBuilder sb = new StringBuilder();
    if (!CoreUtils.isNullOrEmpty(metadata)) {
        for (final Map.Entry<String, String> entry : metadata.entrySet()) {
            // Reject whitespace-only keys/values in addition to null/empty ones so the
            // behavior matches the error messages below. (The previous check let
            // whitespace-only entries through despite the message text.)
            if (Objects.isNull(entry.getKey()) || entry.getKey().trim().isEmpty()) {
                throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, "
                    + "empty, or whitespace.");
            } else if (Objects.isNull(entry.getValue()) || entry.getValue().trim().isEmpty()) {
                throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, "
                    + "empty, or whitespace.");
            }
            /*
            The service has an internal base64 decode when metadata is copied from ADLS to Storage, so getMetadata
            will work as normal. Doing this encoding for the customers preserves the existing behavior of
            metadata.
            */
            sb.append(entry.getKey()).append('=')
                .append(new String(Base64.getEncoder().encode(entry.getValue().getBytes(StandardCharsets.UTF_8)),
                    StandardCharsets.UTF_8)).append(',');
        }
        // Trim the trailing comma appended by the loop.
        sb.deleteCharAt(sb.length() - 1);
    }
    return sb.toString();
}
/**
 * Gets the URL of the object represented by this client on the Data Lake service.
 *
 * @return the URL.
 */
String getPathUrl() {
    return this.dataLakeStorage.getUrl();
}
/**
 * Gets the associated account name.
 *
 * @return Account name associated with this storage resource.
 */
public String getAccountName() {
    return this.accountName;
}
/**
 * Gets the name of the File System in which this object lives.
 *
 * @return The name of the File System.
 */
public String getFileSystemName() {
    return this.fileSystemName;
}
/**
 * Gets the path of this object, not including the name of the resource itself.
 *
 * @return The decoded path of the object, or {@code null} when no path name is set.
 */
String getObjectPath() {
    if (this.pathName == null) {
        return null;
    }
    // pathName is stored URL-encoded; callers expect the raw path.
    return Utility.urlDecode(this.pathName);
}
/**
 * Gets the name of this object, not including its full path.
 *
 * @return The name of the object, or {@code null} if this client has no path name.
 */
String getObjectName() {
    // Guard against a null path to mirror getObjectPath(); the previous
    // implementation dereferenced the result unconditionally and threw a
    // NullPointerException when pathName was null.
    String objectPath = getObjectPath();
    if (objectPath == null) {
        return null;
    }
    // split("/") drops trailing empty segments, so "dir/name/" still yields "name".
    String[] pathParts = objectPath.split("/");
    return pathParts[pathParts.length - 1];
}
/**
 * Gets the {@link HttpPipeline} powering this client.
 *
 * @return The pipeline.
 */
public HttpPipeline getHttpPipeline() {
    return this.dataLakeStorage.getHttpPipeline();
}
/**
 * Gets the service version the client is using.
 *
 * @return The service version the client is using.
 */
public DataLakeServiceVersion getServiceVersion() {
    return this.serviceVersion;
}
/**
 * Creates a resource. By default this method will not overwrite an existing path.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
 *
 * <p>For more information see the Azure Docs.</p>
 *
 * @return A reactive response containing information about the created resource.
 */
public Mono<PathInfo> create() {
    try {
        // Delegate with overwrite disabled so an existing path is never clobbered.
        return this.create(false);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a resource.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.create}
 *
 * <p>For more information see the Azure Docs.</p>
 *
 * @param overwrite Whether or not to overwrite, should data exist on the file.
 * @return A reactive response containing information about the created resource.
 */
public Mono<PathInfo> create(boolean overwrite) {
    try {
        final DataLakeRequestConditions conditions = new DataLakeRequestConditions();
        if (!overwrite) {
            // An if-none-match of "*" makes the service reject creation when the path already exists.
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, null, null, conditions).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a resource.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse}
 *
 * <p>For more information see the Azure Docs.</p>
 *
 * @param permissions POSIX access permissions for the resource owner, the resource owning group, and others.
 * @param umask Restricts permissions of the resource to be created.
 * @param headers {@link PathHttpHeaders}
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}
 * @return A {@link Mono} containing a {@link Response} whose value contains a {@link PathInfo}.
 */
public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers,
    Map<String, String> metadata, DataLakeRequestConditions requestConditions) {
    try {
        // The resource type (file vs. directory) is fixed by this client instance.
        return withContext(ctx -> createWithResponse(permissions, umask, pathResourceType, headers, metadata,
            requestConditions, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Creates a resource on the service using the explicit {@code resourceType}.
 *
 * @param permissions POSIX access permissions for the resource owner, owning group, and others.
 * @param umask Restricts permissions of the resource to be created.
 * @param resourceType Whether to create a file or a directory.
 * @param headers {@link PathHttpHeaders}
 * @param metadata Metadata to associate with the resource.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context passed through the HTTP pipeline during the service call.
 * @return A {@link Mono} containing a {@link Response} whose value is a {@link PathInfo}.
 */
Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType,
    PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions,
    Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // The generated layer takes lease and modified conditions as separate objects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    // Unused optional parameters of the generated method (continuation, rename mode,
    // source conditions, etc.) are passed as null.
    return this.dataLakeStorage.paths().createWithRestResponseAsync(resourceType, null, null, null, null,
        buildMetadataString(metadata), permissions, umask, null, null, headers, lac, mac, null, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
 * Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
 *
 * @param recursive Whether or not to delete all paths beneath the directory; only meaningful for directories and
 * may be null.
 * @param requestConditions {@link DataLakeRequestConditions}; treated as "no conditions" when null.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Mono} containing status code and HTTP headers (the response value is always null).
 */
Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions,
    Context context) {
    // Normalize so the condition translation below never dereferences null.
    requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions;
    // The generated layer takes lease and modified conditions as separate objects.
    LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId());
    ModifiedAccessConditions mac = new ModifiedAccessConditions()
        .setIfMatch(requestConditions.getIfMatch())
        .setIfNoneMatch(requestConditions.getIfNoneMatch())
        .setIfModifiedSince(requestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince());
    return this.dataLakeStorage.paths().deleteWithRestResponseAsync(recursive, null, null, null, lac, mac, context)
        .map(response -> new SimpleResponse<>(response, null));
}
/**
 * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
 * values must be preserved, they must be downloaded and included in the call to this method.
 *
 * {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata}
 *
 * <p>For more information, see the Azure Docs.</p>
 *
 * @param metadata Metadata to associate with the resource.
 * @return A reactive response signalling completion.
 */
public Mono<Void> setMetadata(Map<String, String> metadata) {
    try {
        return this.setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old
* values must be preserved, they must be downloaded and included in the call to this method.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata
*
* <p>For more information, see the
* <a href="https:
*
* @param metadata Metadata to associate with the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata,
    DataLakeRequestConditions requestConditions) {
    // Metadata lives on the underlying blob resource, so hand off to the wrapped
    // block blob client and translate blob-storage errors into data-lake ones.
    try {
        return this.blockBlobAsyncClient
            .setMetadataWithResponse(metadata, Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @return A reactive response signalling completion.
*/
public Mono<Void> setHttpHeaders(PathHttpHeaders headers) {
    // Convenience overload: no request conditions, only the completion signal.
    try {
        Mono<Response<Void>> withResponse = setHttpHeadersWithResponse(headers, null);
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes a resources's HTTP header properties. If only one HTTP header is updated, the others will all be erased.
* In order to preserve existing values, they must be passed alongside the header being changed.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param headers {@link PathHttpHeaders}
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers,
    DataLakeRequestConditions requestConditions) {
    // HTTP headers are a blob-level property: convert the header/condition types,
    // delegate to the wrapped blob client, and translate blob-storage errors.
    try {
        return this.blockBlobAsyncClient
            .setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers),
                Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Returns the resources's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<PathProperties> getProperties() {
    // Convenience overload: no request conditions; unwrap the Response and emit
    // just the PathProperties payload.
    try {
        Mono<Response<PathProperties>> withResponse = getPropertiesWithResponse(null);
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Returns the resource's metadata and properties.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource's properties and metadata.
*/
public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) {
    // Fetch blob properties through the wrapped client, then convert both the
    // error type and the payload into their data-lake counterparts.
    try {
        return blockBlobAsyncClient
            .getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions))
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException)
            .map(rsp -> new SimpleResponse<>(rsp, Transforms.toPathProperties(rsp.getValue())));
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.exists}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Boolean> exists() {
    // Unwrap the Response and emit just the boolean existence flag.
    try {
        Mono<Response<Boolean>> withResponse = existsWithResponse();
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Determines if the path this client represents exists in the cloud.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse}
*
* @return true if the path exists, false if it doesn't
*/
public Mono<Response<Boolean>> existsWithResponse() {
    // Existence is checked against the underlying blob; only the error type
    // needs translating into its data-lake form.
    try {
        return blockBlobAsyncClient
            .existsWithResponse()
            .onErrorMap(DataLakeImplUtils::transformBlobStorageException);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group,
    String owner) {
    // Convenience overload without request conditions; unwrap to the PathInfo.
    try {
        Mono<Response<PathInfo>> withResponse =
            setAccessControlListWithResponse(accessControlList, group, owner, null);
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes the access control list, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param accessControlList A list of {@link PathAccessControlEntry} objects.
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList,
    String group, String owner, DataLakeRequestConditions requestConditions) {
    // ACL update path: permissions are passed as null so only the ACL (plus
    // group/owner) reaches the shared setAccessControl implementation.
    try {
        return withContext(context ->
            setAccessControlWithResponse(accessControlList, null, group, owner, requestConditions, context));
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @return A reactive response containing the resource info.
*/
public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) {
    // Convenience overload without request conditions; unwrap to the PathInfo.
    try {
        Mono<Response<PathInfo>> withResponse = setPermissionsWithResponse(permissions, group, owner, null);
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Changes the permissions, group and/or owner for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param permissions {@link PathPermissions}
* @param group The group of the resource.
* @param owner The owner of the resource.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource info.
*/
public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner,
    DataLakeRequestConditions requestConditions) {
    // Permissions update path: the ACL argument is passed as null so only the
    // symbolic permissions (plus group/owner) reach the shared implementation.
    try {
        return withContext(context ->
            setAccessControlWithResponse(null, permissions, group, owner, requestConditions, context));
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList,
    PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions,
    Context context) {
    // Shared implementation behind setAccessControlListWithResponse and
    // setPermissionsWithResponse; those callers supply either an ACL or symbolic
    // permissions (the other argument arrives null).
    DataLakeRequestConditions conditions = (requestConditions == null)
        ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions leaseConditions = new LeaseAccessConditions().setLeaseId(conditions.getLeaseId());
    ModifiedAccessConditions modifiedConditions = new ModifiedAccessConditions()
        .setIfMatch(conditions.getIfMatch())
        .setIfNoneMatch(conditions.getIfNoneMatch())
        .setIfModifiedSince(conditions.getIfModifiedSince())
        .setIfUnmodifiedSince(conditions.getIfUnmodifiedSince());
    // Serialize whichever representation the caller provided; both stay null-safe.
    String permissionsString = (permissions == null) ? null : permissions.toString();
    String serializedAcl = (accessControlList == null)
        ? null : PathAccessControlEntry.serializeList(accessControlList);
    return this.dataLakeStorage.paths()
        .setAccessControlWithRestResponseAsync(null, owner, group, permissionsString, serializedAcl, null,
            leaseConditions, modifiedConditions, context)
        .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(),
            response.getDeserializedHeaders().getLastModified())));
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl}
*
* <p>For more information, see the
* <a href="https:
*
* @return A reactive response containing the resource access control.
*/
public Mono<PathAccessControl> getAccessControl() {
    // Default form: userPrincipalNameReturned=false, no request conditions.
    try {
        Mono<Response<PathAccessControl>> withResponse = getAccessControlWithResponse(false, null);
        return withResponse.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
/**
* Returns the access control for a resource.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse
*
* <p>For more information, see the
* <a href="https:
*
* @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false,
* user identity values returned as Azure Active Directory Object IDs. Default value is false.
* @param requestConditions {@link DataLakeRequestConditions}
* @return A reactive response containing the resource access control.
*/
public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions) {
    // Bind the subscriber's reactive context, then run the package-private overload.
    try {
        return withContext(context ->
            getAccessControlWithResponse(userPrincipalNameReturned, requestConditions, context));
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the reactive error channel.
        return monoError(logger, ex);
    }
}
Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned,
    DataLakeRequestConditions requestConditions, Context context) {
    // Issue a GET_ACCESS_CONTROL properties call, then reassemble the returned
    // headers (ACL, permissions, group, owner) into a PathAccessControl.
    DataLakeRequestConditions conditions = (requestConditions == null)
        ? new DataLakeRequestConditions() : requestConditions;
    LeaseAccessConditions leaseConditions = new LeaseAccessConditions().setLeaseId(conditions.getLeaseId());
    ModifiedAccessConditions modifiedConditions = new ModifiedAccessConditions()
        .setIfMatch(conditions.getIfMatch())
        .setIfNoneMatch(conditions.getIfNoneMatch())
        .setIfModifiedSince(conditions.getIfModifiedSince())
        .setIfUnmodifiedSince(conditions.getIfUnmodifiedSince());
    return this.dataLakeStorage.paths()
        .getPropertiesWithRestResponseAsync(PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned,
            null, null, leaseConditions, modifiedConditions, context)
        .map(response -> new SimpleResponse<>(response, new PathAccessControl(
            PathAccessControlEntry.parseList(response.getDeserializedHeaders().getAcl()),
            PathPermissions.parseSymbolic(response.getDeserializedHeaders().getPermissions()),
            response.getDeserializedHeaders().getGroup(), response.getDeserializedHeaders().getOwner())));
}
/**
* Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient}
*
* @param destinationFileSystem The file system of the destination within the account.
* {@code null} for the current file system.
* @param destinationPath The path of the destination relative to the file system name
* @param sourceRequestConditions {@link DataLakeRequestConditions} against the source.
* @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* DataLakePathAsyncClient} used to interact with the path created.
*/
Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath,
    DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions,
    Context context) {
    // Rename is issued as a create call on the DESTINATION path carrying
    // PathRenameMode.LEGACY and the source path as the rename source.
    destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions()
        : destinationRequestConditions;
    sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions()
        : sourceRequestConditions;
    // Only the match/modified checks travel in SourceModifiedAccessConditions;
    // the source lease id is passed separately to createWithRestResponseAsync below.
    SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions()
        .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince())
        .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince())
        .setSourceIfMatch(sourceRequestConditions.getIfMatch())
        .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch())
    // Destination conditions map onto the generated layer's lease + modified conditions.
    LeaseAccessConditions destLac = new LeaseAccessConditions()
        .setLeaseId(destinationRequestConditions.getLeaseId());
    ModifiedAccessConditions destMac = new ModifiedAccessConditions()
        .setIfMatch(destinationRequestConditions.getIfMatch())
        .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch())
        .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince())
        .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince());
    // Client pointed at the destination; also handed back to the caller on success.
    DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath);
    // The rename source is this client's current path as "/<filesystem>/<path>".
    String renameSource = "/" + this.fileSystemName + "/" + pathName;
    return dataLakePathAsyncClient.dataLakeStorage.paths().createWithRestResponseAsync(null /* pathResourceType */,
        null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(),
        null /* metadata */, null /* permissions */, null /* umask */, null /* request id */, null /* timeout */,
        null /* pathHttpHeaders */, destLac, destMac, sourceConditions, context)
        .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient));
}
/**
* Takes in a destination and creates a DataLakePathAsyncClient with a new path
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return A DataLakePathAsyncClient
*/
/**
* Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name
* @param destinationFileSystem The destination file system
* @param destinationPath The destination path
* @return An updated SpecializedBlobClientBuilder
*/
SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String destinationPath) {
    // Build a blob client builder whose endpoint points at the destination path,
    // defaulting to the current file system when none is given.
    String fileSystem = (destinationFileSystem == null) ? getFileSystemName() : destinationFileSystem;
    // Swap the dfs endpoint for its blob twin, then rewrite container and blob name.
    String newBlobEndpoint = BlobUrlParts
        .parse(DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(), "blob", "dfs"))
        .setBlobName(destinationPath)
        .setContainerName(fileSystem)
        .toUrl()
        .toString();
    return new SpecializedBlobClientBuilder()
        .pipeline(getHttpPipeline())
        .endpoint(newBlobEndpoint)
        .serviceVersion(BlobServiceVersion.getLatest());
}
// Package-private accessor for the wrapped blob client backing this path.
BlockBlobAsyncClient getBlockBlobAsyncClient() {
    return this.blockBlobAsyncClient;
}
/**
* Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}.
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS.
* </p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
* @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values.
* @see DataLakeServiceAsyncClient
* to get a user delegation key.
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues,
    UserDelegationKey userDelegationKey) {
    // Convert the data-lake SAS values and delegation key into their blob
    // equivalents, then let the wrapped blob client perform the signing.
    return blockBlobAsyncClient.generateUserDelegationSas(
        Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues),
        Transforms.toBlobUserDelegationKey(userDelegationKey));
}
/**
* Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}
* Note : The client must be authenticated via {@link StorageSharedKeyCredential}
* <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas
*
* @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues}
*
* @return A {@code String} representing all SAS query parameters.
*/
public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) {
    // Delegate signing to the wrapped blob client using translated SAS values.
    return blockBlobAsyncClient.generateSas(Transforms.toBlobSasValues(dataLakeServiceSasSignatureValues));
}
} |
You might want to consider moving the ternary operation inside the method call and doing it on one line? | public HttpClient build() {
OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null
? new OkHttpClient.Builder()
: this.okHttpClient.newBuilder();
for (Interceptor interceptor : this.networkInterceptors) {
httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor);
}
httpClientBuilder = (this.readTimeout != null)
? httpClientBuilder.readTimeout(this.readTimeout)
: httpClientBuilder.readTimeout(DEFAULT_READ_TIMEOUT);
httpClientBuilder = (this.connectionTimeout != null)
? httpClientBuilder.connectTimeout(this.connectionTimeout)
: httpClientBuilder.connectTimeout(DEFAULT_CONNECT_TIMEOUT);
if (this.connectionPool != null) {
httpClientBuilder = httpClientBuilder.connectionPool(connectionPool);
}
if (this.dispatcher != null) {
httpClientBuilder = httpClientBuilder.dispatcher(dispatcher);
}
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = (proxyOptions == null)
? ProxyOptions.loadFromConfiguration(buildConfiguration)
: proxyOptions;
if (buildProxyOptions != null) {
httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector(
mapProxyType(buildProxyOptions.getType(), logger), buildProxyOptions.getAddress(),
buildProxyOptions.getNonProxyHosts()));
if (proxyOptions.getUsername() != null) {
String basicAuthorizationHeader = Credentials.basic(buildProxyOptions.getUsername(),
buildProxyOptions.getPassword());
httpClientBuilder = httpClientBuilder.proxyAuthenticator((route, response) ->
response.request().newBuilder()
.header("Proxy-Authorization", basicAuthorizationHeader)
.build());
}
}
return new OkHttpAsyncHttpClient(httpClientBuilder.build());
} | : httpClientBuilder.readTimeout(DEFAULT_READ_TIMEOUT); | public HttpClient build() {
OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null
? new OkHttpClient.Builder()
: this.okHttpClient.newBuilder();
for (Interceptor interceptor : this.networkInterceptors) {
httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor);
}
httpClientBuilder = httpClientBuilder.readTimeout((readTimeout != null) ? readTimeout : DEFAULT_READ_TIMEOUT);
httpClientBuilder = (this.connectionTimeout != null)
? httpClientBuilder.connectTimeout(this.connectionTimeout)
: httpClientBuilder.connectTimeout(DEFAULT_CONNECT_TIMEOUT);
if (this.connectionPool != null) {
httpClientBuilder = httpClientBuilder.connectionPool(connectionPool);
}
if (this.dispatcher != null) {
httpClientBuilder = httpClientBuilder.dispatcher(dispatcher);
}
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = (proxyOptions == null)
? ProxyOptions.fromConfiguration(buildConfiguration)
: proxyOptions;
if (buildProxyOptions != null) {
httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector(
mapProxyType(buildProxyOptions.getType(), logger), buildProxyOptions.getAddress(),
buildProxyOptions.getNonProxyHosts()));
if (proxyOptions.getUsername() != null) {
String basicAuthorizationHeader = Credentials.basic(buildProxyOptions.getUsername(),
buildProxyOptions.getPassword());
httpClientBuilder = httpClientBuilder.proxyAuthenticator((route, response) ->
response.request().newBuilder()
.header("Proxy-Authorization", basicAuthorizationHeader)
.build());
}
}
return new OkHttpAsyncHttpClient(httpClientBuilder.build());
} | class OkHttpAsyncHttpClientBuilder {
private final ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class);
private final okhttp3.OkHttpClient okHttpClient;
private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(120);
private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(60);
private List<Interceptor> networkInterceptors = new ArrayList<>();
private Duration readTimeout;
private Duration connectionTimeout;
private ConnectionPool connectionPool;
private Dispatcher dispatcher;
private ProxyOptions proxyOptions;
private Configuration configuration;
/**
* Creates OkHttpAsyncHttpClientBuilder.
*/
public OkHttpAsyncHttpClientBuilder() {
this.okHttpClient = null;
}
/**
* Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient.
*
* @param okHttpClient the httpclient
*/
public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) {
this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null.");
}
/**
* Add a network layer interceptor to Http request pipeline.
*
* @param networkInterceptor the interceptor to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) {
Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null.");
this.networkInterceptors.add(networkInterceptor);
return this;
}
/**
* Add network layer interceptors to Http request pipeline.
*
* This replaces all previously-set interceptors.
*
* @param networkInterceptors the interceptors to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) {
this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null.");
return this;
}
/**
* Sets the read timeout.
*
* The default read timeout is 120 seconds.
*
* @param readTimeout the read timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
/**
* Sets the connection timeout.
*
* The default connection timeout is 60 seconds.
*
* @param connectionTimeout the connection timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
/**
* Sets the Http connection pool.
*
* @param connectionPool the OkHttp connection pool to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) {
this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null.");
return this;
}
/**
* Sets the dispatcher that also composes the thread pool for executing HTTP requests.
*
* @param dispatcher the dispatcher to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) {
this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null.");
return this;
}
/**
* Sets the proxy.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.core.http.okhttp.OkHttpAsyncHttpClientBuilder.proxy
*
* @param proxyOptions The proxy configuration to use.
* @return the updated {@link OkHttpAsyncHttpClientBuilder} object
*/
public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated OkHttpAsyncHttpClientBuilder object.
*/
public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Build a HttpClient with current configurations.
*
* @return a {@link HttpClient}.
*/
/*
* Maps a 'ProxyOptions.Type' to a 'ProxyProvider.Proxy', if the type is unknown or cannot be mapped an
* IllegalStateException will be thrown.
*/
private static Proxy.Type mapProxyType(ProxyOptions.Type type, ClientLogger logger) {
    // Fail fast with a descriptive message instead of the bare NullPointerException
    // a null enum would trigger in the switch below.
    Objects.requireNonNull(type, "'ProxyOptions.getType()' cannot be null.");
    switch (type) {
        case HTTP:
            return Proxy.Type.HTTP;
        case SOCKS4:
        case SOCKS5:
            // Both SOCKS variants collapse onto the JDK's single SOCKS type.
            return Proxy.Type.SOCKS;
        default:
            // The old message ("Not configuring OkHttp proxy.") was misleading: an
            // exception is thrown here, nothing is silently skipped.
            throw logger.logExceptionAsError(new IllegalStateException(
                String.format("Unknown proxy type '%s' in use. Use a proxy type from 'ProxyOptions.Type'.", type)));
    }
}
} | class OkHttpAsyncHttpClientBuilder {
private final ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class);
private final okhttp3.OkHttpClient okHttpClient;
private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(120);
private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(60);
private List<Interceptor> networkInterceptors = new ArrayList<>();
private Duration readTimeout;
private Duration connectionTimeout;
private ConnectionPool connectionPool;
private Dispatcher dispatcher;
private ProxyOptions proxyOptions;
private Configuration configuration;
/**
* Creates OkHttpAsyncHttpClientBuilder.
*/
public OkHttpAsyncHttpClientBuilder() {
this.okHttpClient = null;
}
/**
* Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient.
*
* @param okHttpClient the httpclient
*/
public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) {
this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null.");
}
/**
* Add a network layer interceptor to Http request pipeline.
*
* @param networkInterceptor the interceptor to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) {
Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null.");
this.networkInterceptors.add(networkInterceptor);
return this;
}
/**
* Add network layer interceptors to Http request pipeline.
*
* This replaces all previously-set interceptors.
*
* @param networkInterceptors the interceptors to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) {
this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null.");
return this;
}
/**
* Sets the read timeout.
*
* The default read timeout is 120 seconds.
*
* @param readTimeout the read timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
/**
* Sets the connection timeout.
*
* The default connection timeout is 60 seconds.
*
* @param connectionTimeout the connection timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
/**
* Sets the Http connection pool.
*
* @param connectionPool the OkHttp connection pool to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) {
this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null.");
return this;
}
/**
* Sets the dispatcher that also composes the thread pool for executing HTTP requests.
*
* @param dispatcher the dispatcher to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) {
this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null.");
return this;
}
/**
* Sets the proxy.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.core.http.okhttp.OkHttpAsyncHttpClientBuilder.proxy
*
* @param proxyOptions The proxy configuration to use.
* @return the updated {@link OkHttpAsyncHttpClientBuilder} object
*/
public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated OkHttpAsyncHttpClientBuilder object.
*/
public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Build a HttpClient with current configurations.
*
* @return a {@link HttpClient}.
*/
/*
* Maps a 'ProxyOptions.Type' to a 'ProxyProvider.Proxy', if the type is unknown or cannot be mapped an
* IllegalStateException will be thrown.
*/
private static Proxy.Type mapProxyType(ProxyOptions.Type type, ClientLogger logger) {
    // Translate the azure-core proxy kind into the JDK equivalent; both SOCKS
    // variants collapse onto the JDK's single SOCKS type.
    Objects.requireNonNull(type, "'ProxyOptions.getType()' cannot be null.");
    if (type == ProxyOptions.Type.HTTP) {
        return Proxy.Type.HTTP;
    }
    if (type == ProxyOptions.Type.SOCKS4 || type == ProxyOptions.Type.SOCKS5) {
        return Proxy.Type.SOCKS;
    }
    throw logger.logExceptionAsError(new IllegalStateException(
        String.format("Unknown proxy type '%s' in use. Use a proxy type from 'ProxyOptions.Type'.", type)));
}
} |
Done | public HttpClient build() {
OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null
? new OkHttpClient.Builder()
: this.okHttpClient.newBuilder();
for (Interceptor interceptor : this.networkInterceptors) {
httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor);
}
httpClientBuilder = (this.readTimeout != null)
? httpClientBuilder.readTimeout(this.readTimeout)
: httpClientBuilder.readTimeout(DEFAULT_READ_TIMEOUT);
httpClientBuilder = (this.connectionTimeout != null)
? httpClientBuilder.connectTimeout(this.connectionTimeout)
: httpClientBuilder.connectTimeout(DEFAULT_CONNECT_TIMEOUT);
if (this.connectionPool != null) {
httpClientBuilder = httpClientBuilder.connectionPool(connectionPool);
}
if (this.dispatcher != null) {
httpClientBuilder = httpClientBuilder.dispatcher(dispatcher);
}
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = (proxyOptions == null)
? ProxyOptions.loadFromConfiguration(buildConfiguration)
: proxyOptions;
if (buildProxyOptions != null) {
httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector(
mapProxyType(buildProxyOptions.getType(), logger), buildProxyOptions.getAddress(),
buildProxyOptions.getNonProxyHosts()));
if (proxyOptions.getUsername() != null) {
String basicAuthorizationHeader = Credentials.basic(buildProxyOptions.getUsername(),
buildProxyOptions.getPassword());
httpClientBuilder = httpClientBuilder.proxyAuthenticator((route, response) ->
response.request().newBuilder()
.header("Proxy-Authorization", basicAuthorizationHeader)
.build());
}
}
return new OkHttpAsyncHttpClient(httpClientBuilder.build());
} | : httpClientBuilder.readTimeout(DEFAULT_READ_TIMEOUT); | public HttpClient build() {
OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null
? new OkHttpClient.Builder()
: this.okHttpClient.newBuilder();
for (Interceptor interceptor : this.networkInterceptors) {
httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor);
}
httpClientBuilder = httpClientBuilder.readTimeout((readTimeout != null) ? readTimeout : DEFAULT_READ_TIMEOUT);
httpClientBuilder = (this.connectionTimeout != null)
? httpClientBuilder.connectTimeout(this.connectionTimeout)
: httpClientBuilder.connectTimeout(DEFAULT_CONNECT_TIMEOUT);
if (this.connectionPool != null) {
httpClientBuilder = httpClientBuilder.connectionPool(connectionPool);
}
if (this.dispatcher != null) {
httpClientBuilder = httpClientBuilder.dispatcher(dispatcher);
}
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = (proxyOptions == null)
? ProxyOptions.fromConfiguration(buildConfiguration)
: proxyOptions;
if (buildProxyOptions != null) {
httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector(
mapProxyType(buildProxyOptions.getType(), logger), buildProxyOptions.getAddress(),
buildProxyOptions.getNonProxyHosts()));
if (proxyOptions.getUsername() != null) {
String basicAuthorizationHeader = Credentials.basic(buildProxyOptions.getUsername(),
buildProxyOptions.getPassword());
httpClientBuilder = httpClientBuilder.proxyAuthenticator((route, response) ->
response.request().newBuilder()
.header("Proxy-Authorization", basicAuthorizationHeader)
.build());
}
}
return new OkHttpAsyncHttpClient(httpClientBuilder.build());
} | class OkHttpAsyncHttpClientBuilder {
private final ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class);
private final okhttp3.OkHttpClient okHttpClient;
private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(120);
private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(60);
private List<Interceptor> networkInterceptors = new ArrayList<>();
private Duration readTimeout;
private Duration connectionTimeout;
private ConnectionPool connectionPool;
private Dispatcher dispatcher;
private ProxyOptions proxyOptions;
private Configuration configuration;
/**
* Creates OkHttpAsyncHttpClientBuilder.
*/
public OkHttpAsyncHttpClientBuilder() {
this.okHttpClient = null;
}
/**
* Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient.
*
* @param okHttpClient the httpclient
*/
public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) {
this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null.");
}
/**
* Add a network layer interceptor to Http request pipeline.
*
* @param networkInterceptor the interceptor to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) {
Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null.");
this.networkInterceptors.add(networkInterceptor);
return this;
}
/**
* Add network layer interceptors to Http request pipeline.
*
* This replaces all previously-set interceptors.
*
* @param networkInterceptors the interceptors to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) {
this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null.");
return this;
}
/**
* Sets the read timeout.
*
* The default read timeout is 120 seconds.
*
* @param readTimeout the read timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
/**
* Sets the connection timeout.
*
* The default connection timeout is 60 seconds.
*
* @param connectionTimeout the connection timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
/**
* Sets the Http connection pool.
*
* @param connectionPool the OkHttp connection pool to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) {
this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null.");
return this;
}
/**
* Sets the dispatcher that also composes the thread pool for executing HTTP requests.
*
* @param dispatcher the dispatcher to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) {
this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null.");
return this;
}
/**
* Sets the proxy.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.core.http.okhttp.OkHttpAsyncHttpClientBuilder.proxy
*
* @param proxyOptions The proxy configuration to use.
* @return the updated {@link OkHttpAsyncHttpClientBuilder} object
*/
public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated OkHttpAsyncHttpClientBuilder object.
*/
public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Build a HttpClient with current configurations.
*
* @return a {@link HttpClient}.
*/
/*
* Maps a 'ProxyOptions.Type' to a 'ProxyProvider.Proxy', if the type is unknown or cannot be mapped an
* IllegalStateException will be thrown.
*/
private static Proxy.Type mapProxyType(ProxyOptions.Type type, ClientLogger logger) {
switch (type) {
case HTTP:
return Proxy.Type.HTTP;
case SOCKS4:
case SOCKS5:
return Proxy.Type.SOCKS;
default:
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Unknown Proxy type '%s' in use. Not configuring OkHttp proxy.", type)));
}
}
} | class OkHttpAsyncHttpClientBuilder {
private final ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class);
private final okhttp3.OkHttpClient okHttpClient;
private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(120);
private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(60);
private List<Interceptor> networkInterceptors = new ArrayList<>();
private Duration readTimeout;
private Duration connectionTimeout;
private ConnectionPool connectionPool;
private Dispatcher dispatcher;
private ProxyOptions proxyOptions;
private Configuration configuration;
/**
* Creates OkHttpAsyncHttpClientBuilder.
*/
public OkHttpAsyncHttpClientBuilder() {
this.okHttpClient = null;
}
/**
* Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient.
*
* @param okHttpClient the httpclient
*/
public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) {
this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null.");
}
/**
* Add a network layer interceptor to Http request pipeline.
*
* @param networkInterceptor the interceptor to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) {
Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null.");
this.networkInterceptors.add(networkInterceptor);
return this;
}
/**
* Add network layer interceptors to Http request pipeline.
*
* This replaces all previously-set interceptors.
*
* @param networkInterceptors the interceptors to add
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) {
this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null.");
return this;
}
/**
* Sets the read timeout.
*
* The default read timeout is 120 seconds.
*
* @param readTimeout the read timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
/**
* Sets the connection timeout.
*
* The default connection timeout is 60 seconds.
*
* @param connectionTimeout the connection timeout
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
/**
* Sets the Http connection pool.
*
* @param connectionPool the OkHttp connection pool to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) {
this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null.");
return this;
}
/**
* Sets the dispatcher that also composes the thread pool for executing HTTP requests.
*
* @param dispatcher the dispatcher to use
* @return the updated OkHttpAsyncHttpClientBuilder object
*/
public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) {
this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null.");
return this;
}
/**
* Sets the proxy.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.core.http.okhttp.OkHttpAsyncHttpClientBuilder.proxy
*
* @param proxyOptions The proxy configuration to use.
* @return the updated {@link OkHttpAsyncHttpClientBuilder} object
*/
public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated OkHttpAsyncHttpClientBuilder object.
*/
public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Build a HttpClient with current configurations.
*
* @return a {@link HttpClient}.
*/
/*
* Maps a 'ProxyOptions.Type' to a 'ProxyProvider.Proxy', if the type is unknown or cannot be mapped an
* IllegalStateException will be thrown.
*/
private static Proxy.Type mapProxyType(ProxyOptions.Type type, ClientLogger logger) {
Objects.requireNonNull(type, "'ProxyOptions.getType()' cannot be null.");
switch (type) {
case HTTP:
return Proxy.Type.HTTP;
case SOCKS4:
case SOCKS5:
return Proxy.Type.SOCKS;
default:
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Unknown proxy type '%s' in use. Use a proxy type from 'ProxyOptions.Type'.", type)));
}
}
} |
Don't we want Math.ceil() of these values so we round up? | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)),
response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)), | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics(response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient |
It might be better to default this to -1 so that customers can tell the difference between not set and empty share | public ShareStatistics(int shareUsageInGB) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = 0;
} | this.shareUsageInBytes = 0; | public ShareStatistics(int shareUsageInGB) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = -1;
} | class ShareStatistics {
private final int shareUsageInGB;
private final long shareUsageInBytes;
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
*/
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
* @param shareUsageInBytes Size in bytes of the Share
*/
public ShareStatistics(int shareUsageInGB, long shareUsageInBytes) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = shareUsageInBytes;
}
/**
* @return the size in GB of the Share
*/
public int getShareUsageInGB() {
return shareUsageInGB;
}
/**
* @return the size in bytes of the Share
*/
public long getShareUsageInBytes() {
return shareUsageInBytes;
}
} | class ShareStatistics {
private final int shareUsageInGB;
private final long shareUsageInBytes;
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
*/
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInBytes Size in bytes of the Share
*/
public ShareStatistics(long shareUsageInBytes) {
this.shareUsageInGB = (int) Math.ceil((double) shareUsageInBytes / Constants.GB);
this.shareUsageInBytes = shareUsageInBytes;
}
/**
* @return the size in GB of the Share
*/
public int getShareUsageInGB() {
return shareUsageInGB;
}
/**
* @return the size in bytes of the Share
*/
public long getShareUsageInBytes() {
return shareUsageInBytes;
}
} |
It might also be better for the new constructor to just take a long and we calculate the share usage in bytes in the constructor. | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)),
response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)), | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics(response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient |
Done | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)),
response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | new ShareStatistics((int) (response.getValue().getShareUsageBytes() / (Constants.GB)), | private Response<ShareStatistics> mapGetStatisticsResponse(SharesGetStatisticsResponse response) {
ShareStatistics shareStatistics =
new ShareStatistics(response.getValue().getShareUsageBytes());
return new SimpleResponse<>(response, shareStatistics);
} | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient | class ShareAsyncClient {
private final ClientLogger logger = new ClientLogger(ShareAsyncClient.class);
private final AzureFileStorageImpl azureFileStorageClient;
private final String shareName;
private final String snapshot;
private final String accountName;
private final ShareServiceVersion serviceVersion;
/**
* Creates a ShareAsyncClient that sends requests to the storage share at {@link AzureFileStorageImpl
* endpoint}. Each service call goes through the {@link HttpPipeline pipeline} in the
* {@code azureFileStorageClient}.
*
* @param client Client that interacts with the service interfaces
* @param shareName Name of the share
*/
ShareAsyncClient(AzureFileStorageImpl client, String shareName, String snapshot, String accountName,
ShareServiceVersion serviceVersion) {
Objects.requireNonNull(shareName, "'shareName' cannot be null.");
this.shareName = shareName;
this.snapshot = snapshot;
this.accountName = accountName;
this.azureFileStorageClient = client;
this.serviceVersion = serviceVersion;
}
/**
* Get the url of the storage share client.
*
* @return the url of the Storage Share.
*/
public String getShareUrl() {
StringBuilder shareUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/").append(shareName);
if (snapshot != null) {
shareUrlString.append("?snapshot=").append(snapshot);
}
return shareUrlString.toString();
}
/**
* Gets the service version the client is using.
*
* @return the service version the client is using.
*/
public ShareServiceVersion getServiceVersion() {
return serviceVersion;
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @return a {@link ShareDirectoryAsyncClient} that interacts with the root directory in the share
*/
public ShareDirectoryAsyncClient getRootDirectoryClient() {
return getDirectoryClient("");
}
/**
* Constructs a {@link ShareDirectoryAsyncClient} that interacts with the specified directory.
*
* <p>If the directory doesn't exist in the share {@link ShareDirectoryAsyncClient
* azureFileStorageClient will need to be called before interaction with the directory can happen.</p>
*
* @param directoryName Name of the directory
* @return a {@link ShareDirectoryAsyncClient} that interacts with the directory in the share
*/
public ShareDirectoryAsyncClient getDirectoryClient(String directoryName) {
return new ShareDirectoryAsyncClient(azureFileStorageClient, shareName, directoryName, snapshot, accountName,
serviceVersion);
}
/**
* Constructs a {@link ShareFileAsyncClient} that interacts with the specified file.
*
* <p>If the file doesn't exist in the share {@link ShareFileAsyncClient |
Done | public ShareStatistics(int shareUsageInGB) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = 0;
} | this.shareUsageInBytes = 0; | public ShareStatistics(int shareUsageInGB) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = -1;
} | class ShareStatistics {
private final int shareUsageInGB;
private final long shareUsageInBytes;
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
*/
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
* @param shareUsageInBytes Size in bytes of the Share
*/
public ShareStatistics(int shareUsageInGB, long shareUsageInBytes) {
this.shareUsageInGB = shareUsageInGB;
this.shareUsageInBytes = shareUsageInBytes;
}
/**
* @return the size in GB of the Share
*/
public int getShareUsageInGB() {
return shareUsageInGB;
}
/**
* @return the size in bytes of the Share
*/
public long getShareUsageInBytes() {
return shareUsageInBytes;
}
} | class ShareStatistics {
private final int shareUsageInGB;
private final long shareUsageInBytes;
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInGB Size in GB of the Share
*/
/**
* Creates an instance of storage statistics for a Share.
*
* @param shareUsageInBytes Size in bytes of the Share
*/
public ShareStatistics(long shareUsageInBytes) {
this.shareUsageInGB = (int) Math.ceil((double) shareUsageInBytes / Constants.GB);
this.shareUsageInBytes = shareUsageInBytes;
}
/**
* @return the size in GB of the Share
*/
public int getShareUsageInGB() {
return shareUsageInGB;
}
/**
* @return the size in bytes of the Share
*/
public long getShareUsageInBytes() {
return shareUsageInBytes;
}
} |
Would be good to add a unit test case for this change. | public void subscribe(CoreSubscriber<? super AsyncPollResponse<T, U>> actual) {
this.oneTimeActivationMono
.flatMapMany(ignored -> {
final PollResponse<T> activationResponse = this.rootContext.getActivationResponse();
if (activationResponse.getStatus().isComplete()) {
return Flux.just(new AsyncPollResponse<>(this.rootContext,
this.cancelOperation,
this.fetchResultOperation));
} else {
return this.pollingLoop();
}
})
.subscribe(actual);
} | .flatMapMany(ignored -> { | public void subscribe(CoreSubscriber<? super AsyncPollResponse<T, U>> actual) {
this.oneTimeActivationMono
.flatMapMany(ignored -> {
final PollResponse<T> activationResponse = this.rootContext.getActivationResponse();
if (activationResponse.getStatus().isComplete()) {
return Flux.just(new AsyncPollResponse<>(this.rootContext,
this.cancelOperation,
this.fetchResultOperation));
} else {
return this.pollingLoop();
}
})
.subscribe(actual);
} | class PollerFlux<T, U> extends Flux<AsyncPollResponse<T, U>> {
private final ClientLogger logger = new ClientLogger(PollerFlux.class);
private final PollingContext<T> rootContext = new PollingContext<>();
private final Duration defaultPollInterval;
private final Function<PollingContext<T>, Mono<T>> activationOperation;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperationEx;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation;
private final BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation;
private final Function<PollingContext<T>, Mono<U>> fetchResultOperation;
private final Mono<Boolean> oneTimeActivationMono;
private volatile boolean activated = false;
private volatile int activationGuardFlag = 0;
@SuppressWarnings({"rawtypes"})
private final AtomicIntegerFieldUpdater<PollerFlux> guardActivationCall =
AtomicIntegerFieldUpdater.newUpdater(PollerFlux.class, "activationGuardFlag");
/**
* Creates PollerFlux.
*
* @param defaultPollInterval the default polling interval
* @param activationOperation the activation operation to be invoked at most once across all subscriptions,
* this parameter is required, if there is no specific activation work to be
* done then invocation should return Mono.empty(), this operation will be called
* with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation, this parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long-running operation
* if service supports cancellation, this parameter is required and if service does not
* support cancellation then the implementer should return Mono.error with an error message
* indicating absence of cancellation support, the operation will be called with current
* {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long-running operation if service support it, this parameter is required and
* operation will be called current {@link PollingContext}, if service does not have an
* api to fetch final result and if final result is same as final poll response value
* then implementer can choose to simply return value from provided final poll response.
*/
public PollerFlux(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<T>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
if (defaultPollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = defaultPollInterval;
this.activationOperation = Objects.requireNonNull(activationOperation,
"'activationOperation' cannot be null.");
this.activationOperationEx = null;
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = oneTimeActivationMono();
}
/**
* Creates PollerFlux.
*
* PollerFlux obtained from this factory method uses an activationOperation which returns a Mono that
* emits {@link PollResponse}, response holds the result. The PollerFlux created from constructor uses
* an activationOperation which returns a Mono that directly emits result. Since the first variant of
* PollerFlux has access to the response, it can skip the polling loop if the response indicate that
* LRO is completed. Whereas the second PollerFlux variant calls pollFunction at least once.
*
* @param defaultPollInterval the default polling interval
* @param activationOperation the activation operation to be invoked at most once across all subscriptions,
* this parameter is required, if there is no specific activation work to be
* done then invocation should return Mono.empty(), this operation will be called
* with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation, this parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long-running operation
* if service supports cancellation, this parameter is required and if service does not
* support cancellation then the implementer should return Mono.error with an error message
* indicating absence of cancellation support, the operation will be called with current
* {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long-running operation if service support it, this parameter is required and
* operation will be called current {@link PollingContext}, if service does not have an
* api to fetch final result and if final result is same as final poll response value
* then implementer can choose to simply return value from provided final poll response.
*
* @param <T> The type of poll response value.
* @param <U> The type of the final result of long-running operation.
* @return PollerFlux
*/
public static <T, U> PollerFlux<T, U>
create(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
return new PollerFlux<>(defaultPollInterval,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation,
true);
}
private PollerFlux(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation,
boolean ignored) {
Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
if (defaultPollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = defaultPollInterval;
this.activationOperation = null;
this.activationOperationEx = Objects.requireNonNull(activationOperation,
"'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = oneTimeActivationMono();
}
@Override
/**
* @return a synchronous blocking poller.
*/
public SyncPoller<T, U> getSyncPoller() {
return new DefaultSyncPoller<>(this.defaultPollInterval,
this.activationOperation,
this.pollOperation,
this.cancelOperation,
this.fetchResultOperation);
}
/**
* Returns a decorated Mono, upon subscription it internally subscribes to the Mono that perform one
* time activation. The decorated Mono caches the result of activation operation as a PollResponse
* in {@code rootContext}, this cached response will be used by any future subscriptions.
*
* Note: we can't use standard cache() operator, because it caches error terminal signal and forward
* it to any future subscriptions. If there is an error from activation Mono then we don't want to cache
* it but just forward it to subscription that initiated the failed activation. For any future subscriptions
* we don't want to forward the past error instead activation should again invoked. Once a subscription
* received a successful event from activation Mono then we cache it in {@code rootContext} and will be used
* by any future subscriptions.
*
* The decorated Mono also handles concurrent calls to activation. Only one of them will be able to call
* activation and other subscriptions will keep resubscribing until it sees a activation happened or get a chance
* to call activation as the one previously entered the critical section got an error on activation.
* @return a one time activation mono
*/
@SuppressWarnings("unchecked")
private Mono<Boolean> oneTimeActivationMono() {
return Mono.defer(() -> {
if (this.activated) {
return Mono.just(true);
}
if (this.guardActivationCall.compareAndSet(this, 0, 1)) {
final boolean isEx = this.activationOperationEx != null;
final Mono<?> activationMono;
try {
activationMono = isEx
? this.activationOperationEx.apply(this.rootContext)
: this.activationOperation.apply(this.rootContext);
} catch (RuntimeException e) {
this.guardActivationCall.compareAndSet(this, 1, 0);
return FluxUtil.monoError(this.logger, e);
}
Mono<PollResponse<T>> activationMonoResponse;
if (isEx) {
activationMonoResponse = activationMono.map(o -> (PollResponse<T>) o);
} else {
activationMonoResponse = activationMono
.map(o -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, (T) o));
}
return activationMonoResponse
.switchIfEmpty(Mono.defer(() ->
Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, null))))
.map(activationResponse -> {
this.rootContext.setOnetimeActivationResponse(activationResponse);
this.activated = true;
return true;
})
.doOnError(throwable -> this.guardActivationCall.compareAndSet(this, 1, 0));
} else {
return Mono.empty();
}
}).repeatWhenEmpty((Flux<Long> longFlux) -> longFlux.concatMap(ignored -> Flux.just(true)));
}
/**
* Do the polling until it reaches a terminal state.
*
* @return a Flux that emits polling event.
*/
private Flux<AsyncPollResponse<T, U>> pollingLoop() {
return Flux.using(
() -> this.rootContext.copy(),
cxt -> Mono.defer(() -> this.pollOperation.apply(cxt))
.delaySubscription(getDelay(cxt.getLatestResponse()))
.switchIfEmpty(Mono.error(new IllegalStateException("PollOperation returned Mono.empty().")))
.repeat()
.takeUntil(currentPollResponse -> currentPollResponse.getStatus().isComplete())
.onErrorResume(throwable -> {
logger.warning("Received an error from pollOperation. Any error from pollOperation "
+ "will be ignored and polling will be continued. Error:" + throwable.getMessage());
return Mono.empty();
})
.concatMap(currentPollResponse -> {
cxt.setLatestResponse(currentPollResponse);
return Mono.just(new AsyncPollResponse<>(cxt,
this.cancelOperation,
this.fetchResultOperation));
}),
cxt -> { });
}
/**
* Get the duration to wait before making next poll attempt.
*
* @param pollResponse the poll response to retrieve delay duration from
* @return the delay
*/
private Duration getDelay(PollResponse<T> pollResponse) {
Duration retryAfter = pollResponse.getRetryAfter();
if (retryAfter == null) {
return this.defaultPollInterval;
} else {
return retryAfter.compareTo(Duration.ZERO) > 0
? retryAfter
: this.defaultPollInterval;
}
}
} | class PollerFlux<T, U> extends Flux<AsyncPollResponse<T, U>> {
private final ClientLogger logger = new ClientLogger(PollerFlux.class);
private final PollingContext<T> rootContext = new PollingContext<>();
private final Duration defaultPollInterval;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation;
private final BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation;
private final Function<PollingContext<T>, Mono<U>> fetchResultOperation;
private final Mono<Boolean> oneTimeActivationMono;
private final Function<PollingContext<T>, PollResponse<T>> syncActivationOperation;
/**
* Creates PollerFlux.
*
* @param pollInterval the polling interval
* @param activationOperation the activation operation to activate (start) the long running operation.
* This operation will be invoked at most once across all subscriptions. This parameter is required.
* If there is no specific activation work to be done then invocation should return Mono.empty(),
* this operation will be called with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation. This parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long running operation
* if service supports cancellation. This parameter is required. If service does not support cancellation
* then the implementer should return Mono.error with an error message indicating absence of cancellation
* support. The operation will be called with current {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long running operation if service support it. This parameter is required and operation will be called
* current {@link PollingContext}. If service does not have an api to fetch final result and if final result
* is same as final poll response value then implementer can choose to simply return value from provided
* final poll response.
*/
public PollerFlux(Duration pollInterval,
Function<PollingContext<T>, Mono<T>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
Objects.requireNonNull(pollInterval, "'pollInterval' cannot be null.");
if (pollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = pollInterval;
Objects.requireNonNull(activationOperation, "'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = new OneTimeActivation<>(this.rootContext,
activationOperation,
activationResult -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activationResult)).getMono();
this.syncActivationOperation =
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activationOperation.apply(cxt).block());
}
/**
* Creates PollerFlux.
*
* This create method differs from the PollerFlux constructor in that the constructor uses an
* activationOperation which returns a Mono that emits result, the create method uses an activationOperation
* which returns a Mono that emits {@link PollResponse}. The {@link PollResponse} holds the result.
* If the {@link PollResponse} from the activationOperation indicate that long running operation is
* completed then the pollOperation will not be called.
*
* @param pollInterval the polling interval
* @param activationOperation the activation operation to activate (start) the long running operation.
* This operation will be invoked at most once across all subscriptions. This parameter is required.
* If there is no specific activation work to be done then invocation should return Mono.empty(),
* this operation will be called with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation. This parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long running operation
* if service supports cancellation. This parameter is required. If service does not support cancellation
* then the implementer should return Mono.error with an error message indicating absence of cancellation
* support. The operation will be called with current {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long running operation if service support it. This parameter is required and operation will be called
* current {@link PollingContext}. If service does not have an api to fetch final result and if final result
* is same as final poll response value then implementer can choose to simply return value from provided
* final poll response.
*
* @param <T> The type of poll response value.
* @param <U> The type of the final result of long running operation.
* @return PollerFlux
*/
public static <T, U> PollerFlux<T, U>
create(Duration pollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
return new PollerFlux<>(pollInterval,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation,
true);
}
private PollerFlux(Duration pollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation,
boolean ignored) {
Objects.requireNonNull(pollInterval, "'pollInterval' cannot be null.");
if (pollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'pollInterval' is not allowed."));
}
this.defaultPollInterval = pollInterval;
Objects.requireNonNull(activationOperation, "'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = new OneTimeActivation<>(this.rootContext,
activationOperation,
Function.identity()).getMono();
this.syncActivationOperation = cxt -> activationOperation.apply(cxt).block();
}
@Override
/**
* @return a synchronous blocking poller.
*/
public SyncPoller<T, U> getSyncPoller() {
return new DefaultSyncPoller<>(this.defaultPollInterval,
this.syncActivationOperation,
this.pollOperation,
this.cancelOperation,
this.fetchResultOperation);
}
/**
* Do the polling until it reaches a terminal state.
*
* @return a Flux that emits polling event.
*/
private Flux<AsyncPollResponse<T, U>> pollingLoop() {
return Flux.using(
() -> this.rootContext.copy(),
cxt -> Mono.defer(() -> this.pollOperation.apply(cxt))
.delaySubscription(getDelay(cxt.getLatestResponse()))
.switchIfEmpty(Mono.error(new IllegalStateException("PollOperation returned Mono.empty().")))
.repeat()
.takeUntil(currentPollResponse -> currentPollResponse.getStatus().isComplete())
.onErrorResume(throwable -> {
logger.warning("Received an error from pollOperation. Any error from pollOperation "
+ "will be ignored and polling will be continued. Error:" + throwable.getMessage());
return Mono.empty();
})
.concatMap(currentPollResponse -> {
cxt.setLatestResponse(currentPollResponse);
return Mono.just(new AsyncPollResponse<>(cxt,
this.cancelOperation,
this.fetchResultOperation));
}),
cxt -> { });
}
/**
* Get the duration to wait before making next poll attempt.
*
* @param pollResponse the poll response to retrieve delay duration from
* @return the delay
*/
private Duration getDelay(PollResponse<T> pollResponse) {
Duration retryAfter = pollResponse.getRetryAfter();
if (retryAfter == null) {
return this.defaultPollInterval;
} else {
return retryAfter.compareTo(Duration.ZERO) > 0
? retryAfter
: this.defaultPollInterval;
}
}
/**
* A utility to get One-Time-Executable-Mono that execute an activation function at most once.
*
* When subscribed to such a Mono it internally subscribes to a Mono that perform an activation
* function. The One-Time-Executable-Mono caches the result of activation function as a PollResponse
* in {@code rootContext}, this cached response will be used by any future subscriptions.
*
* Note: The standard cache() operator can't be used to achieve one time execution, because it caches
* error terminal signal and forward it to any future subscriptions. If there is an error while executing
* activation function then error should not be cached but it should be forward it to subscription that
* initiated the failed activation. For any future subscriptions such past error should not be delivered
* instead activation function should again invoked. Once a subscription result in successful execution
* of activation function then it will be cached in {@code rootContext} and will be used by any future
* subscriptions.
*
* The One-Time-Executable-Mono handles concurrent calls to activation. Only one of them will be able
* to execute the activation function and other subscriptions will keep resubscribing until it sees
* a activation happened or get a chance to call activation as the one previously entered the critical
* section got an error on activation.
*
* @param <V> The type of value in poll response.
* @param <R> The type of the activation operation result.
*/
private class OneTimeActivation<V, R> {
private final PollingContext<V> rootContext;
private final Function<PollingContext<V>, Mono<R>> activationFunction;
private final Function<R, PollResponse<V>> activationPollResponseMapper;
private volatile boolean activated = false;
private final AtomicBoolean guardActivation = new AtomicBoolean(false);
/**
* Creates OneTimeActivation.
*
* @param rootContext the root context to store PollResponse holding activation result
* @param activationFunction function upon call return a Mono representing activation work
* @param activationPollResponseMapper mapper to map result of activation work execution to PollResponse
*/
OneTimeActivation(PollingContext<V> rootContext,
Function<PollingContext<V>, Mono<R>> activationFunction,
Function<R, PollResponse<V>> activationPollResponseMapper) {
this.rootContext = rootContext;
this.activationFunction = activationFunction;
this.activationPollResponseMapper = activationPollResponseMapper;
}
/**
* Get the mono containing activation work which on subscription executed only once.
*
* @return the one time executable mono
*/
Mono<Boolean> getMono() {
return Mono.defer(() -> {
if (this.activated) {
return Mono.just(true);
}
if (this.guardActivation.compareAndSet(false, true)) {
final Mono<R> activationMono;
try {
activationMono = this.activationFunction.apply(this.rootContext);
} catch (RuntimeException e) {
this.guardActivation.set(false);
return FluxUtil.monoError(logger, e);
}
return activationMono
.map(this.activationPollResponseMapper)
.switchIfEmpty(Mono.defer(() ->
Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, null))))
.map(activationResponse -> {
this.rootContext.setOnetimeActivationResponse(activationResponse);
this.activated = true;
return true;
})
.doOnError(throwable -> this.guardActivation.set(false));
} else {
return Mono.empty();
}
})
.repeatWhenEmpty((Flux<Long> longFlux) -> longFlux.concatMap(ignored -> Flux.just(true)));
}
}
} |
tests added | public void subscribe(CoreSubscriber<? super AsyncPollResponse<T, U>> actual) {
this.oneTimeActivationMono
.flatMapMany(ignored -> {
final PollResponse<T> activationResponse = this.rootContext.getActivationResponse();
if (activationResponse.getStatus().isComplete()) {
return Flux.just(new AsyncPollResponse<>(this.rootContext,
this.cancelOperation,
this.fetchResultOperation));
} else {
return this.pollingLoop();
}
})
.subscribe(actual);
} | .flatMapMany(ignored -> { | public void subscribe(CoreSubscriber<? super AsyncPollResponse<T, U>> actual) {
this.oneTimeActivationMono
.flatMapMany(ignored -> {
final PollResponse<T> activationResponse = this.rootContext.getActivationResponse();
if (activationResponse.getStatus().isComplete()) {
return Flux.just(new AsyncPollResponse<>(this.rootContext,
this.cancelOperation,
this.fetchResultOperation));
} else {
return this.pollingLoop();
}
})
.subscribe(actual);
} | class PollerFlux<T, U> extends Flux<AsyncPollResponse<T, U>> {
private final ClientLogger logger = new ClientLogger(PollerFlux.class);
private final PollingContext<T> rootContext = new PollingContext<>();
private final Duration defaultPollInterval;
private final Function<PollingContext<T>, Mono<T>> activationOperation;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperationEx;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation;
private final BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation;
private final Function<PollingContext<T>, Mono<U>> fetchResultOperation;
private final Mono<Boolean> oneTimeActivationMono;
private volatile boolean activated = false;
private volatile int activationGuardFlag = 0;
@SuppressWarnings({"rawtypes"})
private final AtomicIntegerFieldUpdater<PollerFlux> guardActivationCall =
AtomicIntegerFieldUpdater.newUpdater(PollerFlux.class, "activationGuardFlag");
/**
* Creates PollerFlux.
*
* @param defaultPollInterval the default polling interval
* @param activationOperation the activation operation to be invoked at most once across all subscriptions,
* this parameter is required, if there is no specific activation work to be
* done then invocation should return Mono.empty(), this operation will be called
* with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation, this parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long-running operation
* if service supports cancellation, this parameter is required and if service does not
* support cancellation then the implementer should return Mono.error with an error message
* indicating absence of cancellation support, the operation will be called with current
* {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long-running operation if service support it, this parameter is required and
* operation will be called current {@link PollingContext}, if service does not have an
* api to fetch final result and if final result is same as final poll response value
* then implementer can choose to simply return value from provided final poll response.
*/
public PollerFlux(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<T>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
if (defaultPollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = defaultPollInterval;
this.activationOperation = Objects.requireNonNull(activationOperation,
"'activationOperation' cannot be null.");
this.activationOperationEx = null;
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = oneTimeActivationMono();
}
/**
* Creates PollerFlux.
*
* PollerFlux obtained from this factory method uses an activationOperation which returns a Mono that
* emits {@link PollResponse}, response holds the result. The PollerFlux created from constructor uses
* an activationOperation which returns a Mono that directly emits result. Since the first variant of
* PollerFlux has access to the response, it can skip the polling loop if the response indicate that
* LRO is completed. Whereas the second PollerFlux variant calls pollFunction at least once.
*
* @param defaultPollInterval the default polling interval
* @param activationOperation the activation operation to be invoked at most once across all subscriptions,
* this parameter is required, if there is no specific activation work to be
* done then invocation should return Mono.empty(), this operation will be called
* with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation, this parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long-running operation
* if service supports cancellation, this parameter is required and if service does not
* support cancellation then the implementer should return Mono.error with an error message
* indicating absence of cancellation support, the operation will be called with current
* {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long-running operation if service support it, this parameter is required and
* operation will be called current {@link PollingContext}, if service does not have an
* api to fetch final result and if final result is same as final poll response value
* then implementer can choose to simply return value from provided final poll response.
*
* @param <T> The type of poll response value.
* @param <U> The type of the final result of long-running operation.
* @return PollerFlux
*/
public static <T, U> PollerFlux<T, U>
create(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
return new PollerFlux<>(defaultPollInterval,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation,
true);
}
private PollerFlux(Duration defaultPollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation,
boolean ignored) {
Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
if (defaultPollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = defaultPollInterval;
this.activationOperation = null;
this.activationOperationEx = Objects.requireNonNull(activationOperation,
"'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = oneTimeActivationMono();
}
@Override
/**
* @return a synchronous blocking poller.
*/
public SyncPoller<T, U> getSyncPoller() {
return new DefaultSyncPoller<>(this.defaultPollInterval,
this.activationOperation,
this.pollOperation,
this.cancelOperation,
this.fetchResultOperation);
}
/**
* Returns a decorated Mono, upon subscription it internally subscribes to the Mono that perform one
* time activation. The decorated Mono caches the result of activation operation as a PollResponse
* in {@code rootContext}, this cached response will be used by any future subscriptions.
*
* Note: we can't use standard cache() operator, because it caches error terminal signal and forward
* it to any future subscriptions. If there is an error from activation Mono then we don't want to cache
* it but just forward it to subscription that initiated the failed activation. For any future subscriptions
* we don't want to forward the past error instead activation should again invoked. Once a subscription
* received a successful event from activation Mono then we cache it in {@code rootContext} and will be used
* by any future subscriptions.
*
* The decorated Mono also handles concurrent calls to activation. Only one of them will be able to call
* activation and other subscriptions will keep resubscribing until it sees a activation happened or get a chance
* to call activation as the one previously entered the critical section got an error on activation.
* @return a one time activation mono
*/
@SuppressWarnings("unchecked")
private Mono<Boolean> oneTimeActivationMono() {
return Mono.defer(() -> {
if (this.activated) {
return Mono.just(true);
}
if (this.guardActivationCall.compareAndSet(this, 0, 1)) {
final boolean isEx = this.activationOperationEx != null;
final Mono<?> activationMono;
try {
activationMono = isEx
? this.activationOperationEx.apply(this.rootContext)
: this.activationOperation.apply(this.rootContext);
} catch (RuntimeException e) {
this.guardActivationCall.compareAndSet(this, 1, 0);
return FluxUtil.monoError(this.logger, e);
}
Mono<PollResponse<T>> activationMonoResponse;
if (isEx) {
activationMonoResponse = activationMono.map(o -> (PollResponse<T>) o);
} else {
activationMonoResponse = activationMono
.map(o -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, (T) o));
}
return activationMonoResponse
.switchIfEmpty(Mono.defer(() ->
Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, null))))
.map(activationResponse -> {
this.rootContext.setOnetimeActivationResponse(activationResponse);
this.activated = true;
return true;
})
.doOnError(throwable -> this.guardActivationCall.compareAndSet(this, 1, 0));
} else {
return Mono.empty();
}
}).repeatWhenEmpty((Flux<Long> longFlux) -> longFlux.concatMap(ignored -> Flux.just(true)));
}
/**
* Do the polling until it reaches a terminal state.
*
* @return a Flux that emits polling event.
*/
private Flux<AsyncPollResponse<T, U>> pollingLoop() {
return Flux.using(
() -> this.rootContext.copy(),
cxt -> Mono.defer(() -> this.pollOperation.apply(cxt))
.delaySubscription(getDelay(cxt.getLatestResponse()))
.switchIfEmpty(Mono.error(new IllegalStateException("PollOperation returned Mono.empty().")))
.repeat()
.takeUntil(currentPollResponse -> currentPollResponse.getStatus().isComplete())
.onErrorResume(throwable -> {
logger.warning("Received an error from pollOperation. Any error from pollOperation "
+ "will be ignored and polling will be continued. Error:" + throwable.getMessage());
return Mono.empty();
})
.concatMap(currentPollResponse -> {
cxt.setLatestResponse(currentPollResponse);
return Mono.just(new AsyncPollResponse<>(cxt,
this.cancelOperation,
this.fetchResultOperation));
}),
cxt -> { });
}
/**
* Get the duration to wait before making next poll attempt.
*
* @param pollResponse the poll response to retrieve delay duration from
* @return the delay
*/
private Duration getDelay(PollResponse<T> pollResponse) {
Duration retryAfter = pollResponse.getRetryAfter();
if (retryAfter == null) {
return this.defaultPollInterval;
} else {
return retryAfter.compareTo(Duration.ZERO) > 0
? retryAfter
: this.defaultPollInterval;
}
}
} | class PollerFlux<T, U> extends Flux<AsyncPollResponse<T, U>> {
private final ClientLogger logger = new ClientLogger(PollerFlux.class);
private final PollingContext<T> rootContext = new PollingContext<>();
private final Duration defaultPollInterval;
private final Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation;
private final BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation;
private final Function<PollingContext<T>, Mono<U>> fetchResultOperation;
private final Mono<Boolean> oneTimeActivationMono;
private final Function<PollingContext<T>, PollResponse<T>> syncActivationOperation;
/**
* Creates PollerFlux.
*
* @param pollInterval the polling interval
* @param activationOperation the activation operation to activate (start) the long running operation.
* This operation will be invoked at most once across all subscriptions. This parameter is required.
* If there is no specific activation work to be done then invocation should return Mono.empty(),
* this operation will be called with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation. This parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long running operation
* if service supports cancellation. This parameter is required. If service does not support cancellation
* then the implementer should return Mono.error with an error message indicating absence of cancellation
* support. The operation will be called with current {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long running operation if service support it. This parameter is required and operation will be called
* current {@link PollingContext}. If service does not have an api to fetch final result and if final result
* is same as final poll response value then implementer can choose to simply return value from provided
* final poll response.
*/
public PollerFlux(Duration pollInterval,
Function<PollingContext<T>, Mono<T>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
Objects.requireNonNull(pollInterval, "'pollInterval' cannot be null.");
if (pollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'defaultPollInterval' is not allowed."));
}
this.defaultPollInterval = pollInterval;
Objects.requireNonNull(activationOperation, "'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = new OneTimeActivation<>(this.rootContext,
activationOperation,
activationResult -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activationResult)).getMono();
this.syncActivationOperation =
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activationOperation.apply(cxt).block());
}
/**
* Creates PollerFlux.
*
* This create method differs from the PollerFlux constructor in that the constructor uses an
* activationOperation which returns a Mono that emits result, the create method uses an activationOperation
* which returns a Mono that emits {@link PollResponse}. The {@link PollResponse} holds the result.
* If the {@link PollResponse} from the activationOperation indicate that long running operation is
* completed then the pollOperation will not be called.
*
* @param pollInterval the polling interval
* @param activationOperation the activation operation to activate (start) the long running operation.
* This operation will be invoked at most once across all subscriptions. This parameter is required.
* If there is no specific activation work to be done then invocation should return Mono.empty(),
* this operation will be called with a new {@link PollingContext}.
* @param pollOperation the operation to poll the current state of long running operation. This parameter
* is required and the operation will be called with current {@link PollingContext}.
* @param cancelOperation a {@link Function} that represents the operation to cancel the long running operation
* if service supports cancellation. This parameter is required. If service does not support cancellation
* then the implementer should return Mono.error with an error message indicating absence of cancellation
* support. The operation will be called with current {@link PollingContext}.
* @param fetchResultOperation a {@link Function} that represents the operation to retrieve final result of
* the long running operation if service support it. This parameter is required and operation will be called
* current {@link PollingContext}. If service does not have an api to fetch final result and if final result
* is same as final poll response value then implementer can choose to simply return value from provided
* final poll response.
*
* @param <T> The type of poll response value.
* @param <U> The type of the final result of long running operation.
* @return PollerFlux
*/
public static <T, U> PollerFlux<T, U>
create(Duration pollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation) {
return new PollerFlux<>(pollInterval,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation,
true);
}
private PollerFlux(Duration pollInterval,
Function<PollingContext<T>, Mono<PollResponse<T>>> activationOperation,
Function<PollingContext<T>, Mono<PollResponse<T>>> pollOperation,
BiFunction<PollingContext<T>, PollResponse<T>, Mono<T>> cancelOperation,
Function<PollingContext<T>, Mono<U>> fetchResultOperation,
boolean ignored) {
Objects.requireNonNull(pollInterval, "'pollInterval' cannot be null.");
if (pollInterval.compareTo(Duration.ZERO) <= 0) {
throw logger.logExceptionAsWarning(new IllegalArgumentException(
"Negative or zero value for 'pollInterval' is not allowed."));
}
this.defaultPollInterval = pollInterval;
Objects.requireNonNull(activationOperation, "'activationOperation' cannot be null.");
this.pollOperation = Objects.requireNonNull(pollOperation, "'pollOperation' cannot be null.");
this.cancelOperation = Objects.requireNonNull(cancelOperation, "'cancelOperation' cannot be null.");
this.fetchResultOperation = Objects.requireNonNull(fetchResultOperation,
"'fetchResultOperation' cannot be null.");
this.oneTimeActivationMono = new OneTimeActivation<>(this.rootContext,
activationOperation,
Function.identity()).getMono();
this.syncActivationOperation = cxt -> activationOperation.apply(cxt).block();
}
@Override
/**
* @return a synchronous blocking poller.
*/
public SyncPoller<T, U> getSyncPoller() {
return new DefaultSyncPoller<>(this.defaultPollInterval,
this.syncActivationOperation,
this.pollOperation,
this.cancelOperation,
this.fetchResultOperation);
}
/**
* Do the polling until it reaches a terminal state.
*
* @return a Flux that emits polling event.
*/
private Flux<AsyncPollResponse<T, U>> pollingLoop() {
return Flux.using(
() -> this.rootContext.copy(),
cxt -> Mono.defer(() -> this.pollOperation.apply(cxt))
.delaySubscription(getDelay(cxt.getLatestResponse()))
.switchIfEmpty(Mono.error(new IllegalStateException("PollOperation returned Mono.empty().")))
.repeat()
.takeUntil(currentPollResponse -> currentPollResponse.getStatus().isComplete())
.onErrorResume(throwable -> {
logger.warning("Received an error from pollOperation. Any error from pollOperation "
+ "will be ignored and polling will be continued. Error:" + throwable.getMessage());
return Mono.empty();
})
.concatMap(currentPollResponse -> {
cxt.setLatestResponse(currentPollResponse);
return Mono.just(new AsyncPollResponse<>(cxt,
this.cancelOperation,
this.fetchResultOperation));
}),
cxt -> { });
}
/**
* Get the duration to wait before making next poll attempt.
*
* @param pollResponse the poll response to retrieve delay duration from
* @return the delay
*/
private Duration getDelay(PollResponse<T> pollResponse) {
Duration retryAfter = pollResponse.getRetryAfter();
if (retryAfter == null) {
return this.defaultPollInterval;
} else {
return retryAfter.compareTo(Duration.ZERO) > 0
? retryAfter
: this.defaultPollInterval;
}
}
/**
* A utility to get One-Time-Executable-Mono that execute an activation function at most once.
*
* When subscribed to such a Mono it internally subscribes to a Mono that perform an activation
* function. The One-Time-Executable-Mono caches the result of activation function as a PollResponse
* in {@code rootContext}, this cached response will be used by any future subscriptions.
*
* Note: The standard cache() operator can't be used to achieve one time execution, because it caches
* error terminal signal and forward it to any future subscriptions. If there is an error while executing
* activation function then error should not be cached but it should be forward it to subscription that
* initiated the failed activation. For any future subscriptions such past error should not be delivered
* instead activation function should again invoked. Once a subscription result in successful execution
* of activation function then it will be cached in {@code rootContext} and will be used by any future
* subscriptions.
*
* The One-Time-Executable-Mono handles concurrent calls to activation. Only one of them will be able
* to execute the activation function and other subscriptions will keep resubscribing until it sees
* a activation happened or get a chance to call activation as the one previously entered the critical
* section got an error on activation.
*
* @param <V> The type of value in poll response.
* @param <R> The type of the activation operation result.
*/
private class OneTimeActivation<V, R> {
private final PollingContext<V> rootContext;
private final Function<PollingContext<V>, Mono<R>> activationFunction;
private final Function<R, PollResponse<V>> activationPollResponseMapper;
private volatile boolean activated = false;
private final AtomicBoolean guardActivation = new AtomicBoolean(false);
/**
* Creates OneTimeActivation.
*
* @param rootContext the root context to store PollResponse holding activation result
* @param activationFunction function upon call return a Mono representing activation work
* @param activationPollResponseMapper mapper to map result of activation work execution to PollResponse
*/
OneTimeActivation(PollingContext<V> rootContext,
Function<PollingContext<V>, Mono<R>> activationFunction,
Function<R, PollResponse<V>> activationPollResponseMapper) {
this.rootContext = rootContext;
this.activationFunction = activationFunction;
this.activationPollResponseMapper = activationPollResponseMapper;
}
/**
* Get the mono containing activation work which on subscription executed only once.
*
* @return the one time executable mono
*/
Mono<Boolean> getMono() {
return Mono.defer(() -> {
if (this.activated) {
return Mono.just(true);
}
if (this.guardActivation.compareAndSet(false, true)) {
final Mono<R> activationMono;
try {
activationMono = this.activationFunction.apply(this.rootContext);
} catch (RuntimeException e) {
this.guardActivation.set(false);
return FluxUtil.monoError(logger, e);
}
return activationMono
.map(this.activationPollResponseMapper)
.switchIfEmpty(Mono.defer(() ->
Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, null))))
.map(activationResponse -> {
this.rootContext.setOnetimeActivationResponse(activationResponse);
this.activated = true;
return true;
})
.doOnError(throwable -> this.guardActivation.set(false));
} else {
return Mono.empty();
}
})
.repeatWhenEmpty((Flux<Long> longFlux) -> longFlux.concatMap(ignored -> Flux.just(true)));
}
}
} |
Passing in an empty string gives an invalid country hint error? | public void detectLanguageInvalidCountryHint() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage(""));
assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE));
} | assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); | public void detectLanguageInvalidCountryHint() {
Exception exception = assertThrows(TextAnalyticsException.class, () ->
client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE));
assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE));
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that an TextAnalyticsException is thrown when empty text is passed.
*/
@Test
public void detectLanguageEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint.
*/
@Test
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(400, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.80624294281005859);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.8);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.65);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.11472424095537814, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.1, 0.5, 0.4, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.99, 0.005, 0.005, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.005, 0.005, 0.99, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that an TextAnalyticsException is thrown for a empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.02, 0.91, 0.07, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
} | class TextAnalyticsClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsClient client;
@Override
protected void beforeTest() {
client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.pipeline(httpPipeline)
.buildClient());
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void detectLanguagesBatchInputShowStatistics() {
detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true,
getExpectedBatchDetectedLanguages(),
client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test Detect batch input languages.
*/
@Test
public void detectLanguagesBatchInput() {
detectLanguageRunner((inputs) -> validateDetectLanguage(false,
getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs)));
}
/**
* Test Detect batch languages for List of String input with country Hint.
*/
@Test
public void detectLanguagesBatchListCountryHint() {
detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(),
client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue()));
}
/**
* Test Detect batch languages for List of String input.
*/
@Test
public void detectLanguagesBatchStringInput() {
detectLanguageStringInputRunner((inputs) -> validateDetectLanguage(
false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs)));
}
/**
* Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages.
*/
@Test
public void detectSingleTextLanguage() {
DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 0.0);
List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage);
validateDetectedLanguages(
client.detectLanguage("This is a test English Text").getDetectedLanguages(), expectedLanguageList);
}
/**
* Verifies that an exception is thrown when null text is passed.
*/
@Test
public void detectLanguagesNullInput() {
assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null,
Context.NONE).getValue());
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty text input.
*/
@Test
public void detectLanguageEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
/**
* Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed.
*/
@Test
public void detectLanguageFaultyText() {
DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0);
validateDetectedLanguages(client.detectLanguage("!@
}
/**
* Verifies that a TextAnalyticsException is thrown for a text input with invalid country hint.
*/
@Test
/**
* Verifies that a bad request exception is returned for input documents with same IDs.
*/
@Test
public void detectLanguageDuplicateIdInput() {
detectLanguageDuplicateIdRunner((inputs, options) -> {
HttpResponseException response = assertThrows(HttpResponseException.class,
() -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE));
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode());
});
}
@Test
public void recognizeEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("Seattle", "Location", null, 26, 7, 0.0);
NamedEntity namedEntity2 = new NamedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Arrays.asList(namedEntity1, namedEntity2));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizeEntities("I had a wonderful trip to Seattle last week.").getNamedEntities());
}
@Test
public void recognizeEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizeEntitiesForFaultyText() {
assertEquals(client.recognizeEntities("!@
}
@Test
public void recognizeEntitiesBatchInputSingleError() {
recognizeBatchNamedEntitySingleErrorRunner((inputs) -> {
DocumentResultCollection<RecognizeEntitiesResult> l = client.recognizeBatchEntities(inputs);
for (RecognizeEntitiesResult recognizeEntitiesResult : l) {
Exception exception = assertThrows(TextAnalyticsException.class, () -> recognizeEntitiesResult.getNamedEntities());
assertTrue(exception.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE));
}
});
}
@Test
public void recognizeEntitiesForBatchInput() {
recognizeBatchNamedEntityRunner((inputs) -> validateNamedEntity(false,
getExpectedBatchNamedEntities(), client.recognizeBatchEntities(inputs)));
}
@Test
public void recognizeEntitiesForBatchInputShowStatistics() {
recognizeBatchNamedEntitiesShowStatsRunner((inputs, options) ->
validateNamedEntity(true, getExpectedBatchNamedEntities(),
client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeEntitiesForBatchStringInput() {
recognizeNamedEntityStringInputRunner((inputs) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(), client.recognizeEntities(inputs)));
}
@Test
public void recognizeEntitiesForListLanguageHint() {
recognizeNamedEntitiesLanguageHintRunner((inputs, language) ->
validateNamedEntity(false, getExpectedBatchNamedEntities(),
client.recognizeEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForTextInput() {
NamedEntity namedEntity1 = new NamedEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0);
RecognizeEntitiesResult recognizeEntitiesResultList = new RecognizeEntitiesResult("0", null, null, Collections.singletonList(namedEntity1));
validateNamedEntities(recognizeEntitiesResultList.getNamedEntities(),
client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.").getNamedEntities());
}
@Test
public void recognizePiiEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizePiiEntitiesForFaultyText() {
assertEquals(client.recognizePiiEntities("!@
}
@Test
public void recognizePiiEntitiesForBatchInput() {
recognizeBatchPiiRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForBatchInputShowStatistics() {
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
validatePiiEntity(true, getExpectedBatchPiiEntities(),
client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizePiiEntitiesForBatchStringInput() {
recognizePiiStringInputRunner((inputs) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs)));
}
@Test
public void recognizePiiEntitiesForListLanguageHint() {
recognizePiiLanguageHintRunner((inputs, language) ->
validatePiiEntity(false, getExpectedBatchPiiEntities(),
client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForTextInput() {
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26);
LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https:
RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResultList = new RecognizeLinkedEntitiesResult("0", null, null, Collections.singletonList(linkedEntity1));
validateLinkedEntities(recognizeLinkedEntitiesResultList.getLinkedEntities(), client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").getLinkedEntities());
}
@Test
public void recognizeLinkedEntitiesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void recognizeLinkedEntitiesForFaultyText() {
assertEquals(client.recognizeLinkedEntities("!@
}
@Test
public void recognizeLinkedEntitiesForBatchInput() {
recognizeBatchLinkedEntityRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForBatchInputShowStatistics() {
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
validateLinkedEntity(true, getExpectedBatchLinkedEntities(),
client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void recognizeLinkedEntitiesForBatchStringInput() {
recognizeLinkedStringInputRunner((inputs) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs)));
}
@Test
public void recognizeLinkedEntitiesForListLanguageHint() {
recognizeLinkedLanguageHintRunner((inputs, language) ->
validateLinkedEntity(false, getExpectedBatchLinkedEntities(),
client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForTextInput() {
validateKeyPhrases(Collections.singletonList("monde"),
client.extractKeyPhrases("Bonjour tout le monde.").getKeyPhrases());
}
@Test
public void extractKeyPhrasesForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
@Test
public void extractKeyPhrasesForFaultyText() {
assertEquals(client.extractKeyPhrases("!@
}
@Test
public void extractKeyPhrasesForBatchInput() {
extractBatchKeyPhrasesRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForBatchInputShowStatistics() {
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(),
client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue()));
}
@Test
public void extractKeyPhrasesForBatchStringInput() {
extractKeyPhrasesStringInputRunner((inputs) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs)));
}
@Test
public void extractKeyPhrasesForListLanguageHint() {
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(),
client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for a string input.
*/
@Test
public void analyseSentimentForTextInput() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.MIXED, 0.0, 0.0, 0.0, 66, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEGATIVE, 0.0, 0.0, 0.0, 31, 0),
new TextSentiment(TextSentimentClass.POSITIVE, 0.0, 0.0, 0.0, 35, 32));
AnalyzeSentimentResult analyzeSentimentResult =
client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.");
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Verifies that a TextAnalyticsException is thrown for an empty text input.
*/
@Test
public void analyseSentimentForEmptyText() {
Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment(""));
assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE));
}
/**
* Test analyzing sentiment for a faulty input text.
*/
@Test
public void analyseSentimentForFaultyText() {
final TextSentiment expectedDocumentSentiment = new TextSentiment(TextSentimentClass.NEUTRAL, 0.0, 0.0, 0.0, 5, 0);
final List<TextSentiment> expectedSentenceSentiments = Arrays.asList(
new TextSentiment(TextSentimentClass.NEUTRAL, 0.0, 0.0, 0.0, 1, 0),
new TextSentiment(TextSentimentClass.NEUTRAL, 0.0, 0.0, 0.0, 4, 1));
AnalyzeSentimentResult analyzeSentimentResult = client.analyzeSentiment("!@
validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult.getDocumentSentiment());
validateAnalysedSentenceSentiment(expectedSentenceSentiments, analyzeSentimentResult.getSentenceSentiments());
}
/**
* Test analyzing sentiment for a list of string input.
*/
@Test
public void analyseSentimentForBatchStringInput() {
analyseSentimentStringInputRunner(inputs ->
validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs)));
}
/**
* Test analyzing sentiment for a list of string input with language hint.
*/
@Test
public void analyseSentimentForListLanguageHint() {
analyseSentimentLanguageHintRunner((inputs, language) ->
validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue()));
}
/**
* Test analyzing sentiment for batch input.
*/
@Test
public void analyseSentimentForBatchInput() {
analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(),
client.analyzeBatchSentiment(inputs)));
}
/**
* Verify that we can get statistics on the collection result when given a batch input with options.
*/
@Test
public void analyseSentimentForBatchInputShowStatistics() {
analyseBatchSentimentShowStatsRunner((inputs, options) ->
validateSentiment(true, getExpectedBatchTextSentiment(),
client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue()));
}
/**
* Test client builder with valid subscription key
*/
@Test
public void validKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildClient();
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test client builder with invalid subscription key
*/
@Test
public void invalidKey() {
final TextAnalyticsClient client = createClientBuilder(getEndpoint(),
new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient();
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with valid subscription key but update to invalid key and make call to server.
*/
@Test
public void updateToInvalidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(getSubscriptionKey());
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(INVALID_KEY);
assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text"));
}
/**
* Test client with invalid subscription key but update to valid key and make call to server.
*/
@Test
public void updateToValidKey() {
final TextAnalyticsApiKeyCredential credential =
new TextAnalyticsApiKeyCredential(INVALID_KEY);
final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient();
credential.updateCredential(getSubscriptionKey());
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
client.detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for missing endpoint
*/
@Test
public void missingEndpoint() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.buildClient();
});
}
/**
* Test for null subscription key
*/
@Test
public void nullSubscriptionKey() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).subscriptionKey(null);
});
}
/**
* Test for null AAD credential
*/
@Test
public void nullAADCredential() {
assertThrows(NullPointerException.class, () -> {
final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder();
builder.endpoint(getEndpoint()).credential(null);
});
}
/**
* Test for null service version, which would take take the default service version by default
*/
@Test
public void nullServiceVersion() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.retryPolicy(new RetryPolicy())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.serviceVersion(null);
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
/**
* Test for default pipeline in client builder
*/
@Test
public void defaultPipeline() {
final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder()
.endpoint(getEndpoint())
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey()))
.configuration(Configuration.getGlobalConfiguration())
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
clientBuilder.httpClient(interceptorManager.getPlaybackClient());
} else {
clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build())
.addPolicy(interceptorManager.getRecordPolicy());
}
validateDetectedLanguages(Arrays.asList(new DetectedLanguage("English", "en", 1.0)),
clientBuilder.buildClient().detectLanguage("This is a test English Text").getDetectedLanguages());
}
} |
Is it possible to have a target without an error code a vice-versa? This seems like it could be cleaned up to check that both aren't 'null' at once and do a full string formatting. | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | } | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
`baseMessage` could be returned here instead of retrieving it from the super class again. | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | return super.getMessage(); | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Let's make 'ErrorCodeValue` a constant | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue", | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Let's make `target` a constant | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target); | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Actually, why isn't this in the format string? | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue", | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Actually, why isn't this in the format string? | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target); | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Could a `StringBuilder` be used instead of using two `String.format` calls? | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target); | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
Why do the other `String.format` changes use `Locale.ROOT` where this does not? | void throwExceptionIfError() {
if (this.isError()) {
throw logger.logExceptionAsError(new TextAnalyticsException(
String.format("Error in accessing the property on document id: %s, when %s returned with an error: %s",
this.id, this.getClass().getSimpleName(), this.error.getMessage()),
this.error.getCode().toString(), null));
}
} | this.id, this.getClass().getSimpleName(), this.error.getMessage()), | void throwExceptionIfError() {
if (this.isError()) {
throw logger.logExceptionAsError(new TextAnalyticsException(
String.format(Locale.ROOT,
"Error in accessing the property on document id: %s, when %s returned with an error: %s",
this.id, this.getClass().getSimpleName(), this.error.getMessage()),
this.error.getCode().toString(), null));
}
} | class DocumentResult {
private final String id;
private final TextDocumentStatistics textDocumentStatistics;
private final TextAnalyticsError error;
private final boolean isError;
private final ClientLogger logger = new ClientLogger(DocumentResult.class);
/**
* Create a {@code DocumentResult} model that maintains document id, information about the document payload,
* and document error
*
* @param id unique, non-empty document identifier
* @param textDocumentStatistics text document statistics
* @param error the document error.
*/
DocumentResult(String id, TextDocumentStatistics textDocumentStatistics, TextAnalyticsError error) {
this.id = id;
this.error = error;
this.isError = error != null;
this.textDocumentStatistics = textDocumentStatistics;
}
/**
* Get the document id
*
* @return the document id
*/
public String getId() {
return id;
}
/**
* Get the statistics of the text document
*
* @return the {@link TextDocumentStatistics} statistics of the text document
*/
public TextDocumentStatistics getStatistics() {
throwExceptionIfError();
return textDocumentStatistics;
}
/**
* Get the error of text document
*
* @return the error of text document
*/
public TextAnalyticsError getError() {
return error;
}
/**
* Get the boolean value indicates if the document result is error or not
*
* @return A boolean indicates if the document result is error or not
*/
public boolean isError() {
return isError;
}
/**
* Throw a {@link TextAnalyticsException} if result has isError true and when a non-error property was accessed.
*/
} | class DocumentResult {
private final ClientLogger logger = new ClientLogger(DocumentResult.class);
private final String id;
private final TextDocumentStatistics textDocumentStatistics;
private final TextAnalyticsError error;
private final boolean isError;
/**
* Create a {@code DocumentResult} model that maintains document id, information about the document payload,
* and document error
*
* @param id unique, non-empty document identifier
* @param textDocumentStatistics text document statistics
* @param error the document error.
*/
DocumentResult(String id, TextDocumentStatistics textDocumentStatistics, TextAnalyticsError error) {
this.id = id;
this.error = error;
this.isError = error != null;
this.textDocumentStatistics = textDocumentStatistics;
}
/**
* Get the document id
*
* @return the document id
*/
public String getId() {
return id;
}
/**
* Get the statistics of the text document
*
* @return the {@link TextDocumentStatistics} statistics of the text document
*/
public TextDocumentStatistics getStatistics() {
throwExceptionIfError();
return textDocumentStatistics;
}
/**
* Get the error of text document
*
* @return the error of text document
*/
public TextAnalyticsError getError() {
return error;
}
/**
* Get the boolean value indicates if the document result is error or not
*
* @return A boolean indicates if the document result is error or not
*/
public boolean isError() {
return isError;
}
/**
* Throw a {@link TextAnalyticsException} if result has isError true and when a non-error property was accessed.
*
*/
} |
It should always have error code but target can be null. Can clean it that way. | public String getMessage() {
String baseMessage = super.getMessage();
if (this.errorCodeValue == null) {
return super.getMessage();
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "ErrorCodeValue",
errorCodeValue);
}
if (this.target == null) {
return baseMessage;
} else {
baseMessage = String.format(Locale.ROOT, "%s %s: {%s}", baseMessage, "target", target);
}
return baseMessage;
} | } | public String getMessage() {
StringBuilder baseMessage = new StringBuilder().append(super.getMessage()).append(" ").append(ERROR_CODE)
.append(": {").append(errorCodeValue).append("}");
if (this.target == null) {
return baseMessage.toString();
} else {
return baseMessage.append(", ").append(TARGET).append(": {").append(target).append("}").toString();
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the String value of TextAnalyticsErrorCode for this exception.
*
* @return The String value of TextAnalyticsErrorCode for this exception.
*/
public String getErrorCodeValue() {
return errorCodeValue;
}
} | class TextAnalyticsException extends AzureException {
private static final long serialVersionUID = 21436310107606058L;
private static final String ERROR_CODE = "ErrorCodeValue";
private static final String TARGET = "target";
private final String errorCodeValue;
private final String target;
/**
* Initializes a new instance of the TextAnalyticsException class.
*
* @param message Text containing any additional details of the exception.
* @param errorCodeValue The service returned error code value.
* @param target The target for this exception.
*/
public TextAnalyticsException(String message, String errorCodeValue, String target) {
super(message);
this.errorCodeValue = errorCodeValue;
this.target = target;
}
@Override
/**
* Gets the target for this exception.
*
* @return The target for this exception.
*/
public String getTarget() {
return this.target;
}
/**
* Gets the TextAnalyticsErrorCode for this exception.
*
* @return The TextAnalyticsErrorCode for this exception.
*/
public TextAnalyticsErrorCode getErrorCodeValue() {
return TextAnalyticsErrorCode.fromString(errorCodeValue);
}
} |
nit: can use method reference instead ```suggestion .map(Transforms::processSingleResponseErrorResult); ``` | Mono<Response<AnalyzeSentimentResult>> analyzeSentimentWithResponse(String text, String language, Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");
return analyzeBatchSentimentWithResponse(
Collections.singletonList(new TextDocumentInput("0", text, language)), null, context)
.map(response -> processSingleResponseErrorResult(response));
} | .map(response -> processSingleResponseErrorResult(response)); | return analyzeBatchSentimentWithResponse(
Collections.singletonList(new TextDocumentInput("0", text, language)), null, context)
.map(Transforms::processSingleResponseErrorResult);
}
Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentWithResponse(
List<String> textInputs, String language, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");
List<TextDocumentInput> documentInputs = mapByIndex(textInputs, (index, value) ->
new TextDocumentInput(index, value, language));
return analyzeBatchSentimentWithResponse(documentInputs, null, context);
} | class AnalyzeSentimentAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment
* analysis endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
Mono<Response<AnalyzeSentimentResult>> analyzeSentimentWithResponse(String text, String language, Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");
Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeBatchSentimentWithResponse(
List<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");
final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput()
.setDocuments(toMultiLanguageInput(textInputs));
return service.sentimentWithRestResponseAsync(
batchInput,
options == null ? null : options.getModelVersion(),
options == null ? null : options.showStatistics(), context)
.doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString()))
.doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response))
.doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error))
.map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
}
/**
* Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}.
*
* @param sentimentResponse the {@link SentimentResponse} returned by the service.
*
* @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection(
final SentimentResponse sentimentResponse) {
List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) {
analyzeSentimentResults.add(convertToTextSentimentResult(documentSentiment));
}
for (DocumentError documentError : sentimentResponse.getErrors()) {
final com.azure.ai.textanalytics.models.TextAnalyticsError error =
toTextAnalyticsError(documentError.getError());
analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
error, null, null));
}
return new DocumentResultCollection<>(analyzeSentimentResults,
sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null
: toBatchStatistics(sentimentResponse.getStatistics()));
}
/**
* Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}.
*
* @param documentSentiment the {@link DocumentSentiment} returned by the service.
*
* @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private AnalyzeSentimentResult convertToTextSentimentResult(final DocumentSentiment documentSentiment) {
final TextSentimentClass documentSentimentClass = TextSentimentClass.fromString(documentSentiment.
getSentiment().toString());
if (documentSentimentClass == null) {
logger.logExceptionAsWarning(
new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.",
documentSentiment.getSentiment())));
}
final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores();
final List<TextSentiment> sentenceSentimentTexts = documentSentiment.getSentences().stream()
.map(sentenceSentiment -> {
TextSentimentClass sentimentClass = TextSentimentClass.fromString(sentenceSentiment
.getSentiment().toString());
if (sentimentClass == null) {
logger.logExceptionAsWarning(
new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.",
sentenceSentiment.getSentiment())));
}
SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores();
return new TextSentiment(sentimentClass, confidenceScorePerSentence.getNegative(),
confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive(),
sentenceSentiment.getLength(), sentenceSentiment.getOffset());
}).collect(Collectors.toList());
return new AnalyzeSentimentResult(documentSentiment.getId(),
documentSentiment.getStatistics() == null ? null
: toTextDocumentStatistics(documentSentiment.getStatistics()), null,
new TextSentiment(documentSentimentClass, confidenceScorePerLabel.getNegative(),
confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive(),
sentenceSentimentTexts.stream().mapToInt(TextSentiment::getLength).sum(), 0),
sentenceSentimentTexts);
}
} | class AnalyzeSentimentAsyncClient {
private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment
* analysis endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
Mono<Response<AnalyzeSentimentResult>> analyzeSentimentWithResponse(String text, String language, Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");
Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeBatchSentimentWithResponse(
List<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");
final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput()
.setDocuments(toMultiLanguageInput(textInputs));
return service.sentimentWithRestResponseAsync(
batchInput,
options == null ? null : options.getModelVersion(),
options == null ? null : options.showStatistics(), context)
.doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString()))
.doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response))
.doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error))
.map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
}
/**
* Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}.
*
* @param sentimentResponse the {@link SentimentResponse} returned by the service.
*
* @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection(
final SentimentResponse sentimentResponse) {
List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) {
analyzeSentimentResults.add(convertToTextSentimentResult(documentSentiment));
}
for (DocumentError documentError : sentimentResponse.getErrors()) {
final com.azure.ai.textanalytics.models.TextAnalyticsError error =
toTextAnalyticsError(documentError.getError());
analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
error, null, null));
}
return new DocumentResultCollection<>(analyzeSentimentResults,
sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null
: toBatchStatistics(sentimentResponse.getStatistics()));
}
/**
* Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}.
*
* @param documentSentiment the {@link DocumentSentiment} returned by the service.
*
* @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private AnalyzeSentimentResult convertToTextSentimentResult(final DocumentSentiment documentSentiment) {
final TextSentimentClass documentSentimentClass = TextSentimentClass.fromString(documentSentiment.
getSentiment().toString());
if (documentSentimentClass == null) {
logger.logExceptionAsWarning(
new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.",
documentSentiment.getSentiment())));
}
final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores();
final List<TextSentiment> sentenceSentimentTexts = documentSentiment.getSentences().stream()
.map(sentenceSentiment -> {
TextSentimentClass sentimentClass = TextSentimentClass.fromString(sentenceSentiment
.getSentiment().toString());
if (sentimentClass == null) {
logger.logExceptionAsWarning(
new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.",
sentenceSentiment.getSentiment())));
}
SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores();
return new TextSentiment(sentimentClass, confidenceScorePerSentence.getNegative(),
confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive(),
sentenceSentiment.getLength(), sentenceSentiment.getOffset());
}).collect(Collectors.toList());
return new AnalyzeSentimentResult(documentSentiment.getId(),
documentSentiment.getStatistics() == null ? null
: toTextDocumentStatistics(documentSentiment.getStatistics()), null,
new TextSentiment(documentSentimentClass, confidenceScorePerLabel.getNegative(),
confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive(),
sentenceSentimentTexts.stream().mapToInt(TextSentiment::getLength).sum(), 0),
sentenceSentimentTexts);
}
} |
Shouldn't you be asserting the `UNWRAP_KEY` permission? | Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
}
if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed "
+ "for key with id %s", this.key.get().getId()))));
}
return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get());
});
} | if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { | Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed "
+ "for key with id %s", this.key.getId()))));
}
return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
});
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
AtomicReference<JsonWebKey> key;
private final CryptographyService service;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private String keyCollection;
private final String keyId;
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param key the key to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) {
Objects.requireNonNull(key, "The key vault key is required.");
JsonWebKey jsonWebKey = key.getKey();
Objects.requireNonNull(jsonWebKey, "The Json web key is required.");
if (!jsonWebKey.isValid()) {
throw new IllegalArgumentException("Json Web Key is not valid");
}
if (jsonWebKey.getKeyOps() == null) {
throw new IllegalArgumentException("Json Web Key's key operations property is not configured");
}
if (key.getKeyType() == null) {
throw new IllegalArgumentException("Json Web Key's key type property is not configured");
}
this.key = new AtomicReference<>(jsonWebKey);
this.keyId = key.getId();
service = RestProxy.create(CryptographyService.class, pipeline);
if (!Strings.isNullOrEmpty(key.getId())) {
unpackAndValidateId(key.getId());
cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service);
} else {
cryptographyServiceClient = null;
}
initializeCryptoClients();
}
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param keyId THe Azure Key vault key identifier to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
unpackAndValidateId(keyId);
this.keyId = keyId;
service = RestProxy.create(CryptographyService.class, pipeline);
cryptographyServiceClient = new CryptographyServiceClient(keyId, service);
this.key = null;
}
private void initializeCryptoClients() {
if (localKeyCryptographyClient != null) {
return;
}
if (key.get().getKeyType().equals(RSA) || key.get().getKeyType().equals(RSA_HSM)) {
localKeyCryptographyClient = new RsaKeyCryptographyClient(key.get(), cryptographyServiceClient);
} else if (key.get().getKeyType().equals(EC) || key.get().getKeyType().equals(EC_HSM)) {
localKeyCryptographyClient = new EcKeyCryptographyClient(key.get(), cryptographyServiceClient);
} else if (key.get().getKeyType().equals(OCT)) {
localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key.get(), cryptographyServiceClient);
} else {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString())));
}
}
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<KeyVaultKey>> getKeyWithResponse() {
try {
return withContext(context -> getKeyWithResponse(context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<KeyVaultKey> getKey() {
    try {
        // Fetch the full Response, then unwrap it to surface just the key value.
        Mono<Response<KeyVaultKey>> response = getKeyWithResponse();
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware variant: retrieves the configured key through the service client.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
    return cryptographyServiceClient.getKey(context);
}
// Retrieves the key material stored as a secret (keys in the "secrets" collection).
Mono<JsonWebKey> getSecretKey() {
    try {
        return withContext(cryptographyServiceClient::getSecretKey).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
* single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public
* portion of the key is used for encryption. This operation requires the keys/encrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
 * specified {@code plaintext}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plaintext The content to be encrypted.
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
* @throws ResourceNotFoundException if the key cannot be found for encryption.
* @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code plainText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> encrypt(algorithm, plaintext, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) {
            return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Encrypt Operation is missing permission/not supported for key with id %s", key.get().getId()))));
    });
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
* be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
* keys/decrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
* specified encrypted content. Possible values for assymetric keys include:
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param cipherText The content to be decrypted.
* @return A {@link Mono} containing the decrypted blob.
* @throws ResourceNotFoundException if the key cannot be found for decryption.
* @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code cipherText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> decrypt(algorithm, cipherText, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.decrypt(algorithm, cipherText, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) {
            return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Decrypt Operation is not allowed for key with id %s", key.get().getId()))));
    });
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the keys/sign permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
 * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
* @throws ResourceNotFoundException if the key cannot be found for signing.
* @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code digest} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> sign(algorithm, digest, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.sign(algorithm, digest, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
            return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Sign Operation is not allowed for key with id %s", key.get().getId()))));
    });
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation
* requires the keys/verify permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @param signature The signature to be verified.
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result.
* @throws ResourceNotFoundException if the key cannot be found for verifying.
* @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> verify(algorithm, digest, signature, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.verify(algorithm, digest, signature, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
            return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Verify Operation is not allowed for key with id %s", key.get().getId()))));
    });
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the keys/wrapKey permission.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* key} contains the wrapped key result.
* @throws ResourceNotFoundException if the key cannot be found for wrap operation.
* @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code key} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> wrapKey(algorithm, key, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.wrapKey(algorithm, key, context);
        }
        // NOTE: the `key` parameter shadows the field; the configured key is `this.key`.
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) {
            return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Wrap Key Operation is not allowed for key with id %s", this.key.get().getId()))));
    });
}
/**
* Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is
* the reverse of the wrap operation.
* The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey
* permission.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
* specified encrypted key content. Possible values for asymmetric keys include:
* {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
* Possible values for symmetric keys include: {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a
* response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param encryptedKey The encrypted key content to unwrap.
* @return A {@link Mono} containing a the unwrapped key content.
* @throws ResourceNotFoundException if the key cannot be found for wrap operation.
* @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
* and symmetric keys. This operation requires the keys/sign permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
* Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
*
* @param algorithm The algorithm to use for signing.
* @param data The content from which signature is to be created.
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
* @throws ResourceNotFoundException if the key cannot be found for signing.
* @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code data} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> signData(algorithm, data, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.signData(algorithm, data, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
            return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Sign Operation is not allowed for key with id %s", this.key.get().getId()))));
    });
}
/**
* Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
* keys and asymmetric keys.
* In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires
* the keys/verify permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
*
* @param algorithm The algorithm to use for signing.
* @param data The raw content against which signature is to be verified.
* @param signature The signature to be verified.
* @return The {@link Boolean} indicating the signature verification result.
* @throws ResourceNotFoundException if the key cannot be found for verifying.
* @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> verifyData(algorithm, data, signature, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
        }
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
            return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Verify Operation is not allowed for key with id %s", this.key.get().getId()))));
    });
}
private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid"));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid"));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid"));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e));
}
}
// True when the requested operation appears in the key's permitted operations.
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
    return operations.indexOf(keyOperation) >= 0;
}
// Lazily loads the JsonWebKey material on first use and reports whether it is valid
// for local cryptography. Callers treat `false` as "fall back to the service".
// NOTE(review): when key == null AND keyCollection == null this still emits true,
// and later key.get() dereferences would NPE — confirm that state cannot occur.
private Mono<Boolean> ensureValidKeyAvailable() {
    // Key is "available" unless it was never set and we know which collection to fetch from.
    boolean keyAvailable = !(this.key == null && keyCollection != null);
    if (!keyAvailable) {
        if (keyCollection.equals(SECRETS_COLLECTION)) {
            // Key material is stored as a secret: fetch, cache, and init local crypto clients.
            return getSecretKey().flatMap(jwk -> {
                this.key = new AtomicReference<>(jwk);
                initializeCryptoClients();
                return Mono.just(this.key.get().isValid());
            });
        } else {
            // Regular vault key: fetch it, cache its JsonWebKey, init local crypto clients.
            return getKey().flatMap(kvKey -> {
                this.key = new AtomicReference<>(kvKey.getKey());
                initializeCryptoClients();
                return Mono.just(key.get().isValid());
            });
        }
    } else {
        // Already loaded (or nothing to load): assume usable.
        return Mono.defer(() -> Mono.just(true));
    }
}
// Package-private accessor, used by tests and sibling clients.
CryptographyServiceClient getCryptographyServiceClient() {
    return cryptographyServiceClient;
}
// Package-private mutator, primarily for injecting a mock service client in tests.
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
    this.cryptographyServiceClient = serviceClient;
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
JsonWebKey key;
private final CryptographyService service;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private String keyCollection;
private final String keyId;
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param key the key to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
/**
 * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests.
 *
 * @param key the key to use for cryptography operations.
 * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
 * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
 * @throws NullPointerException if {@code key} or its Json web key content is null.
 * @throws IllegalArgumentException if the Json web key is invalid or incompletely configured.
 */
CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) {
    Objects.requireNonNull(key, "The key vault key is required.");
    JsonWebKey jsonWebKey = key.getKey();
    Objects.requireNonNull(jsonWebKey, "The Json web key is required.");
    // Log-and-throw, consistent with the rest of this class (initializeCryptoClients, unpackAndValidateId).
    if (!jsonWebKey.isValid()) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Json Web Key is not valid"));
    }
    if (jsonWebKey.getKeyOps() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Json Web Key's key operations property is not configured"));
    }
    if (key.getKeyType() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Json Web Key's key type property is not configured"));
    }
    this.key = jsonWebKey;
    this.keyId = key.getId();
    service = RestProxy.create(CryptographyService.class, pipeline);
    // A service-backed client is only possible when the key carries its vault identifier.
    if (!Strings.isNullOrEmpty(key.getId())) {
        unpackAndValidateId(key.getId());
        cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service);
    } else {
        cryptographyServiceClient = null;
    }
    initializeCryptoClients();
}
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param keyId THe Azure Key vault key identifier to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
/**
 * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests.
 * Key material is not available yet; it is fetched lazily on first cryptographic call.
 *
 * @param keyId The Azure Key vault key identifier to use for cryptography operations.
 * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
 * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
 * @throws IllegalArgumentException if {@code keyId} is malformed or missing segments.
 */
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
    unpackAndValidateId(keyId);
    this.keyId = keyId;
    service = RestProxy.create(CryptographyService.class, pipeline);
    cryptographyServiceClient = new CryptographyServiceClient(keyId, service);
    // Deferred: ensureValidKeyAvailable() populates this on first use.
    this.key = null;
}
// Selects the local cryptography implementation matching the key's type.
// Idempotent: a no-op once a local client has been chosen.
private void initializeCryptoClients() {
    if (localKeyCryptographyClient != null) {
        return;
    }
    if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
        localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient);
    } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
        localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient);
    } else if (key.getKeyType().equals(OCT)) {
        localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient);
    } else {
        // Unknown/unsupported key type: fail fast rather than silently lacking a client.
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
            "The Json Web Key Type: %s is not supported.", key.getKeyType().toString())));
    }
}
// Emits the configured key identifier, read lazily at subscription time.
Mono<String> getKeyId() {
    return Mono.defer(() -> Mono.just(this.keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<KeyVaultKey>> getKeyWithResponse() {
    try {
        // Delegate to the Context-aware overload, capturing the subscriber's context.
        return withContext(this::getKeyWithResponse);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<KeyVaultKey> getKey() {
    try {
        // Fetch the full Response, then unwrap it to surface just the key value.
        Mono<Response<KeyVaultKey>> response = getKeyWithResponse();
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware variant: retrieves the configured key through the service client.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
    return cryptographyServiceClient.getKey(context);
}
// Retrieves the key material stored as a secret (keys in the "secrets" collection).
Mono<JsonWebKey> getSecretKey() {
    try {
        return withContext(cryptographyServiceClient::getSecretKey).flatMap(FluxUtil::toMono);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
* single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public
* portion of the key is used for encryption. This operation requires the keys/encrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for assymetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plaintext The content to be encrypted.
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
* @throws ResourceNotFoundException if the key cannot be found for encryption.
* @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code plainText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> encrypt(algorithm, plaintext, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
        }
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
            return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Encrypt Operation is missing permission/not supported for key with id %s", key.getId()))));
    });
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
* be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
* keys/decrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
* specified encrypted content. Possible values for assymetric keys include:
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param cipherText The content to be decrypted.
* @return A {@link Mono} containing the decrypted blob.
* @throws ResourceNotFoundException if the key cannot be found for decryption.
* @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code cipherText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
    try {
        // Hand off to the Context-aware overload with the subscriber's context.
        return withContext(context -> decrypt(algorithm, cipherText, context));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (!localKeyUsable) {
            // No usable local key material; let the service perform the operation.
            return cryptographyServiceClient.decrypt(algorithm, cipherText, context);
        }
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
            return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Decrypt Operation is not allowed for key with id %s", key.getId()))));
    });
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the keys/sign permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
 * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
* @throws ResourceNotFoundException if the key cannot be found for signing.
* @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code digest} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.sign(algorithm, digest, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware sign: prefers the local key when it is present and permits SIGN,
// otherwise routes the request to the Key Vault service.
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
                return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign Operation is not allowed for key with id %s", key.getId()))));
        }
        return cryptographyServiceClient.sign(algorithm, digest, context);
    });
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation
* requires the keys/verify permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @param signature The signature to be verified.
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result.
* @throws ResourceNotFoundException if the key cannot be found for verifying.
* @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.verify(algorithm, digest, signature, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware verify: checks the signature locally when the cached key is present
// and permits VERIFY, otherwise asks the Key Vault service.
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
                return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify Operation is not allowed for key with id %s", key.getId()))));
        }
        return cryptographyServiceClient.verify(algorithm, digest, signature, context);
    });
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the keys/wrapKey permission.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* key} contains the wrapped key result.
* @throws ResourceNotFoundException if the key cannot be found for wrap operation.
* @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code key} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.wrapKey(algorithm, key, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware wrapKey. NOTE: the 'key' parameter (content to wrap) shadows the cached
// key field, hence the explicit 'this.key' for the field below.
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) {
                return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Wrap Key Operation is not allowed for key with id %s", this.key.getId()))));
        }
        return cryptographyServiceClient.wrapKey(algorithm, key, context);
    });
}
/**
* Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is
* the reverse of the wrap operation.
* The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey
* permission.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
* specified encrypted key content. Possible values for asymmetric keys include:
* {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
* Possible values for symmetric keys include: {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a
* response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param encryptedKey The encrypted key content to unwrap.
* @return A {@link Mono} containing a the unwrapped key content.
* @throws ResourceNotFoundException if the key cannot be found for wrap operation.
* @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.unwrapKey(algorithm, encryptedKey, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
* and symmetric keys. This operation requires the keys/sign permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
* Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
*
* @param algorithm The algorithm to use for signing.
* @param data The content from which signature is to be created.
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
* @throws ResourceNotFoundException if the key cannot be found for signing.
* @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code data} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.signData(algorithm, data, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware signData: signs raw data locally when the cached key is present and
// permits SIGN, otherwise routes the request to the Key Vault service.
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
                return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign Operation is not allowed for key with id %s", this.key.getId()))));
        }
        return cryptographyServiceClient.signData(algorithm, data, context);
    });
}
/**
* Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
* keys and asymmetric keys.
* In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires
* the keys/verify permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
*
* @param algorithm The algorithm to use for signing.
* @param data The raw content against which signature is to be verified.
* @param signature The signature to be verified.
* @return The {@link Boolean} indicating the signature verification result.
* @throws ResourceNotFoundException if the key cannot be found for verifying.
* @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.verifyData(algorithm, data, signature, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware verifyData: verifies a signature over raw data locally when the cached
// key is present and permits VERIFY, otherwise asks the Key Vault service.
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
                return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify Operation is not allowed for key with id %s", this.key.getId()))));
        }
        return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
    });
}
private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid"));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid"));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid"));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e));
}
}
// True when the requested key operation appears in the key's permitted operations.
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
    return operations.indexOf(keyOperation) >= 0;
}
/**
 * Ensures the JSON Web Key backing this client is fetched and cached before a local
 * cryptography operation runs.
 *
 * @return a {@link Mono} emitting {@code true} when a valid local key is available
 *     (callers fall back to the service client on {@code false}); emits {@code true}
 *     immediately when the key is already cached or there is no collection to fetch from.
 */
private Mono<Boolean> ensureValidKeyAvailable() {
    if (this.key != null || keyCollection == null) {
        // Already cached, or no identifier to fetch from: nothing to retrieve.
        // (Mono.just replaces the previous Mono.defer(() -> Mono.just(true)) - deferring
        // a constant adds an allocation with no laziness benefit.)
        return Mono.just(true);
    }
    if (keyCollection.equals(SECRETS_COLLECTION)) {
        // Secret-backed keys live in the secrets collection.
        return getSecretKey().map(jwk -> {
            this.key = jwk;
            initializeCryptoClients();
            return this.key.isValid();
        });
    }
    return getKey().map(kvKey -> {
        this.key = kvKey.getKey();
        initializeCryptoClients();
        return this.key.isValid();
    });
}
// Returns the REST-backed client used for service-side cryptography operations.
CryptographyServiceClient getCryptographyServiceClient() {
return cryptographyServiceClient;
}
// Replaces the REST-backed client; package-private, presumably for test injection - confirm.
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
this.cryptographyServiceClient = serviceClient;
}
} |
Nit: this line and the one below each negate the condition, which adds a small cost and, more importantly, hurts readability. Consider simplifying. | private Mono<Boolean> ensureValidKeyAvailable() {
boolean keyAvailable = !(this.key == null && keyCollection != null);
if (!keyAvailable) {
if (keyCollection.equals(SECRETS_COLLECTION)) {
return getSecretKey().flatMap(jwk -> {
this.key = new AtomicReference<>(jwk);
initializeCryptoClients();
return Mono.just(this.key.get().isValid());
});
} else {
return getKey().flatMap(kvKey -> {
this.key = new AtomicReference<>(kvKey.getKey());
initializeCryptoClients();
return Mono.just(key.get().isValid());
});
}
} else {
return Mono.defer(() -> Mono.just(true));
}
} | boolean keyAvailable = !(this.key == null && keyCollection != null); | private Mono<Boolean> ensureValidKeyAvailable() {
boolean keyNotAvailable = (this.key == null && keyCollection != null);
if (keyNotAvailable) {
if (keyCollection.equals(SECRETS_COLLECTION)) {
return getSecretKey().map(jwk -> {
this.key = (jwk);
initializeCryptoClients();
return this.key.isValid();
});
} else {
return getKey().map(kvKey -> {
this.key = (kvKey.getKey());
initializeCryptoClients();
return key.isValid();
});
}
} else {
return Mono.defer(() -> Mono.just(true));
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
AtomicReference<JsonWebKey> key;
private final CryptographyService service;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private String keyCollection;
private final String keyId;
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param key the key to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
/**
 * Creates a CryptographyAsyncClient backed by a concrete Key Vault key.
 *
 * @param key the key to use for cryptography operations; must carry a valid JSON Web Key.
 * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
 * @param version {@link CryptographyServiceVersion} of the service to be used when making
 *     requests (not read in this body; retained for signature compatibility).
 * @throws NullPointerException if {@code key} or its JSON Web Key is null.
 * @throws IllegalArgumentException if the JSON Web Key is invalid or incompletely configured.
 */
CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) {
    Objects.requireNonNull(key, "The key vault key is required.");
    JsonWebKey jsonWebKey = key.getKey();
    Objects.requireNonNull(jsonWebKey, "The Json web key is required.");
    // Route validation failures through the client logger, consistent with the rest of
    // this class (previously these were thrown bare).
    if (!jsonWebKey.isValid()) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Json Web Key is not valid"));
    }
    if (jsonWebKey.getKeyOps() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Json Web Key's key operations property is not configured"));
    }
    if (key.getKeyType() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Json Web Key's key type property is not configured"));
    }
    this.key = new AtomicReference<>(jsonWebKey);
    this.keyId = key.getId();
    service = RestProxy.create(CryptographyService.class, pipeline);
    if (!Strings.isNullOrEmpty(key.getId())) {
        unpackAndValidateId(key.getId());
        cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service);
    } else {
        // No identifier: service-side fallback is unavailable; only local operations work.
        cryptographyServiceClient = null;
    }
    initializeCryptoClients();
}
/**
* Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
*
* @param keyId THe Azure Key vault key identifier to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
// Creates a client from a key identifier only; the key material itself is resolved
// lazily on first use, so this.key starts out null.
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
unpackAndValidateId(keyId);
this.keyId = keyId;
service = RestProxy.create(CryptographyService.class, pipeline);
cryptographyServiceClient = new CryptographyServiceClient(keyId, service);
this.key = null;
}
/**
 * Lazily creates the local cryptography client matching the cached key's type.
 * No-op when a local client already exists.
 *
 * @throws IllegalArgumentException if the key type is not RSA/RSA-HSM, EC/EC-HSM or OCT.
 */
private void initializeCryptoClients() {
    if (localKeyCryptographyClient != null) {
        return;
    }
    // Read the AtomicReference once so every comparison below sees the same key snapshot
    // (the original re-read key.get() on each branch).
    JsonWebKey jsonWebKey = key.get();
    if (jsonWebKey.getKeyType().equals(RSA) || jsonWebKey.getKeyType().equals(RSA_HSM)) {
        localKeyCryptographyClient = new RsaKeyCryptographyClient(jsonWebKey, cryptographyServiceClient);
    } else if (jsonWebKey.getKeyType().equals(EC) || jsonWebKey.getKeyType().equals(EC_HSM)) {
        localKeyCryptographyClient = new EcKeyCryptographyClient(jsonWebKey, cryptographyServiceClient);
    } else if (jsonWebKey.getKeyType().equals(OCT)) {
        localKeyCryptographyClient = new SymmetricKeyCryptographyClient(jsonWebKey, cryptographyServiceClient);
    } else {
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
            "The Json Web Key Type: %s is not supported.", jsonWebKey.getKeyType().toString())));
    }
}
// Emits the key identifier this client was configured with.
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<KeyVaultKey>> getKeyWithResponse() {
    // Method reference in place of the explicit context lambda.
    try {
        return withContext(this::getKeyWithResponse);
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<KeyVaultKey> getKey() {
    // Unwrap the body from the with-response variant.
    try {
        return getKeyWithResponse().flatMap(response -> FluxUtil.toMono(response));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Delegates the get-key call to the REST client with the caller-provided context.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
return cryptographyServiceClient.getKey(context);
}
// Fetches the JSON Web Key material from the secrets collection (secret-backed keys).
Mono<JsonWebKey> getSecretKey() {
    try {
        return withContext(ctx -> cryptographyServiceClient.getSecretKey(ctx))
            .flatMap(response -> FluxUtil.toMono(response));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
* single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public
* portion of the key is used for encryption. This operation requires the keys/encrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for assymetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plaintext The content to be encrypted.
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
* @throws ResourceNotFoundException if the key cannot be found for encryption.
* @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code plainText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.encrypt(algorithm, plaintext, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware encrypt: uses the cached local key when present and permitted for
// ENCRYPT, otherwise routes the request to the Key Vault service.
Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) {
                return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get());
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Encrypt Operation is missing permission/not supported for key with id %s", key.get().getId()))));
        }
        return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
    });
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
* be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
* keys/decrypt permission.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
* specified encrypted content. Possible values for assymetric keys include:
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param cipherText The content to be decrypted.
* @return A {@link Mono} containing the decrypted blob.
* @throws ResourceNotFoundException if the key cannot be found for decryption.
* @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code cipherText} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.decrypt(algorithm, cipherText, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware decrypt: uses the cached local key when present and permitted for
// DECRYPT, otherwise routes the request to the Key Vault service.
Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) {
                return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key.get());
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Decrypt Operation is not allowed for key with id %s", key.get().getId()))));
        }
        return cryptographyServiceClient.decrypt(algorithm, cipherText, context);
    });
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the keys/sign permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
* @throws ResourceNotFoundException if the key cannot be found for signing.
* @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code digest} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.sign(algorithm, digest, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware sign: prefers the cached local key when present and permitted for SIGN,
// otherwise routes the request to the Key Vault service.
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
                return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get());
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign Operation is not allowed for key with id %s", key.get().getId()))));
        }
        return cryptographyServiceClient.sign(algorithm, digest, context);
    });
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation
* requires the keys/verify permission.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* ES512}, {@link SignatureAlgorithm
* SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
* @param signature The signature to be verified.
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result.
* @throws ResourceNotFoundException if the key cannot be found for verifying.
* @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.verify(algorithm, digest, signature, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware verify: checks the signature locally when the cached key is present and
// permitted for VERIFY, otherwise asks the Key Vault service.
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (localKeyReady) {
            if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
                return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get());
            }
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify Operation is not allowed for key with id %s", key.get().getId()))));
        }
        return cryptographyServiceClient.verify(algorithm, digest, signature, context);
    });
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the keys/wrapKey permission.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* key} contains the wrapped key result.
* @throws ResourceNotFoundException if the key cannot be found for wrap operation.
* @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key.
* @throws NullPointerException if {@code algorithm} or {@code key} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
    // Delegate to the context-aware overload; surface synchronous failures as an error Mono.
    try {
        return withContext(ctx -> this.wrapKey(algorithm, key, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
/**
 * Context-aware core of the wrap operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: perform the wrap service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.wrapKey(algorithm, key, context);
        }
        // Honor the key's key_ops list before wrapping locally.
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) {
            return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for "
            + "key with id %s", this.key.get().getId()))));
    });
}
/**
 * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
 * is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap.
 * This operation requires the keys/unwrapKey permission.
 *
 * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
 * specified encrypted key content. NOTE(review): the original list of supported algorithm constants was garbled
 * by extraction; restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
 * a response has been received.</p>
 *
 * @param algorithm The encryption algorithm to use for wrapping the key.
 * @param encryptedKey The encrypted key content to unwrap.
 * @return A {@link Mono} containing the unwrapped key content.
 * @throws ResourceNotFoundException if the key cannot be found for wrap operation.
 * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the unwrap operation: uses the locally cached key when it is
 * usable and permitted, otherwise falls back to the Key Vault service.
 */
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            // No usable local key material: perform the unwrap service-side.
            return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
        }
        // BUG FIX: this previously checked KeyOperation.WRAP_KEY. Unwrapping must be gated on
        // the UNWRAP_KEY permission (matching the keys/unwrapKey service permission and the
        // sibling implementation of this method elsewhere in this file).
        if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.UNWRAP_KEY)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed "
                + "for key with id %s", this.key.get().getId()))));
        }
        return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get());
    });
}
/**
 * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
 * and symmetric keys. This operation requires the keys/sign permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
 * NOTE(review): the original list of supported algorithm constants was garbled by extraction; restore it from the
 * Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a
 * response has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The content from which signature is to be created.
 * @return A {@link Mono} containing a {@link SignResult} that holds the created signature.
 * @throws ResourceNotFoundException if the key cannot be found for signing.
 * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code data} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> signData(algorithm, data, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the sign-data operation: prefers the locally cached key when
 * usable and permitted, otherwise delegates to the Key Vault service.
 */
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: sign service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.signData(algorithm, data, context);
        }
        // Honor the key's key_ops list before signing locally.
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
            return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key "
            + "with id %s", this.key.get().getId()))));
    });
}
/**
 * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
 * keys and asymmetric keys. In case of asymmetric keys the public portion of the key is used to verify the
 * signature. This operation requires the keys/verify permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
 * signature. NOTE(review): the original list of supported algorithm constants was garbled by extraction; restore
 * it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
 * verification details when a response has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The raw content against which signature is to be verified.
 * @param signature The signature to be verified.
 * @return The {@link Boolean} indicating the signature verification result.
 * @throws ResourceNotFoundException if the key cannot be found for verifying.
 * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> verifyData(algorithm, data, signature, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the verify-data operation: uses the locally cached key when it is
 * usable and permitted, otherwise falls back to the Key Vault service.
 */
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: verify service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
        }
        // Honor the key's key_ops list before verifying locally.
        if (checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
            return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get());
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
            "Verify Operation is not allowed for key with id %s", this.key.get().getId()))));
    });
}
private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid"));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid"));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid"));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e));
}
}
// Returns true when the requested operation appears in the key's key_ops list.
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
return operations.contains(keyOperation);
}
// Package-private accessor for the service-backed client (used by siblings/tests).
CryptographyServiceClient getCryptographyServiceClient() {
return cryptographyServiceClient;
}
// Package-private mutator, presumably for test injection — TODO confirm callers.
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
this.cryptographyServiceClient = serviceClient;
}
} | class CryptographyAsyncClient {
// AAD scope used when authenticating to Key Vault.
// NOTE(review): this literal appears truncated by extraction ("https:"); confirm the full
// scope value against the canonical SDK source before relying on it.
static final String KEY_VAULT_SCOPE = "https:
// Name of the secrets collection segment in Key Vault identifiers.
static final String SECRETS_COLLECTION = "secrets";
// Locally cached key material; null when only a key id was supplied.
JsonWebKey key;
// REST proxy for the Key Vault cryptography service.
private final CryptographyService service;
// Service-backed client; null when no key id is available.
private CryptographyServiceClient cryptographyServiceClient;
// Local crypto implementation selected by key type; null until initialized.
private LocalKeyCryptographyClient localKeyCryptographyClient;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
// Collection segment ("keys"/"secrets") parsed out of the key id.
private String keyCollection;
// Full Key Vault identifier of the configured key.
private final String keyId;
/**
 * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests.
 *
 * @param key the key to use for cryptography operations.
 * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
 * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
 * @throws NullPointerException if {@code key} or its JSON web key is null.
 * @throws IllegalArgumentException if the JSON web key is invalid or missing key operations/type.
 */
CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) {
    Objects.requireNonNull(key, "The key vault key is required.");
    JsonWebKey jsonWebKey = key.getKey();
    Objects.requireNonNull(jsonWebKey, "The Json web key is required.");
    // Consistency fix: route validation failures through the ClientLogger like every other
    // exception raised by this class (previously these were thrown unlogged).
    if (!jsonWebKey.isValid()) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Json Web Key is not valid"));
    }
    if (jsonWebKey.getKeyOps() == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Json Web Key's key operations property is not configured"));
    }
    if (key.getKeyType() == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("Json Web Key's key type property is not configured"));
    }
    this.key = jsonWebKey;
    this.keyId = key.getId();
    service = RestProxy.create(CryptographyService.class, pipeline);
    if (!Strings.isNullOrEmpty(key.getId())) {
        // A key id is present: record its collection and enable service-side fallback.
        unpackAndValidateId(key.getId());
        cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service);
    } else {
        cryptographyServiceClient = null;
    }
    initializeCryptoClients();
}
/**
 * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests.
 *
 * <p>All operations are performed service-side through {@link CryptographyServiceClient}; no
 * local key material is cached ({@code key} stays null).</p>
 *
 * @param keyId The Azure Key Vault key identifier to use for cryptography operations.
 * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
 * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
 *     NOTE(review): this parameter is currently unused in the body — confirm intent.
 * @throws IllegalArgumentException if {@code keyId} is null, empty, or malformed.
 */
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
unpackAndValidateId(keyId);
this.keyId = keyId;
service = RestProxy.create(CryptographyService.class, pipeline);
cryptographyServiceClient = new CryptographyServiceClient(keyId, service);
this.key = null;
}
/**
 * Lazily selects the local cryptography implementation matching the cached key's type.
 * No-op when a local client has already been created.
 */
private void initializeCryptoClients() {
    if (localKeyCryptographyClient != null) {
        return;  // already initialized
    }
    if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
        localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient);
        return;
    }
    if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
        localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient);
        return;
    }
    if (key.getKeyType().equals(OCT)) {
        localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient);
        return;
    }
    // Unknown key type: local crypto is impossible, fail loudly.
    throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
        "The Json Web Key Type: %s is not supported.", key.getKeyType().toString())));
}
// Exposes the configured key identifier as a lazy Mono (evaluated at subscription time).
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(keyId));
}
/**
 * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
 * the {@code keys/get} permission.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
 * details when a response has been received.</p>
 *
 * @return A {@link Mono} containing a {@link Response} whose value is the requested {@link KeyVaultKey key}.
 * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<KeyVaultKey>> getKeyWithResponse() {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> getKeyWithResponse(context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
 * the {@code keys/get} permission.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
 * details when a response has been received.</p>
 *
 * @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
 * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<KeyVaultKey> getKey() {
try {
// Unwrap the Response envelope and keep only its value.
return getKeyWithResponse().flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Context-aware overload: always fetches the key from the service.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
return cryptographyServiceClient.getKey(context);
}
// Fetches the key material stored in the secrets collection and unwraps the response value.
Mono<JsonWebKey> getSecretKey() {
try {
return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
 * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
 * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys the
 * public portion of the key is used for encryption. This operation requires the keys/encrypt permission.
 *
 * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
 * specified {@code plaintext}. NOTE(review): the original list of supported algorithm constants was garbled by
 * extraction; restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
 * a response has been received.</p>
 *
 * @param algorithm The algorithm to be used for encryption.
 * @param plaintext The content to be encrypted.
 * @return A {@link Mono} containing a {@link EncryptResult} that holds the encrypted content.
 * @throws ResourceNotFoundException if the key cannot be found for encryption.
 * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code plainText} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> encrypt(algorithm, plaintext, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the encrypt operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: encrypt service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
        }
        // Honor the key's key_ops list before encrypting locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
            return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing "
            + "permission/not supported for key with id %s", key.getId()))));
    });
}
/**
 * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
 * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
 * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
 * keys/decrypt permission.
 *
 * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
 * specified encrypted content. NOTE(review): the original list of supported algorithm constants was garbled by
 * extraction; restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
 * details when a response has been received.</p>
 *
 * @param algorithm The algorithm to be used for decryption.
 * @param cipherText The content to be decrypted.
 * @return A {@link Mono} containing the decrypted blob.
 * @throws ResourceNotFoundException if the key cannot be found for decryption.
 * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> decrypt(algorithm, cipherText, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the decrypt operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: decrypt service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.decrypt(algorithm, cipherText, context);
        }
        // Honor the key's key_ops list before decrypting locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
            return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for "
            + "key with id %s", key.getId()))));
    });
}
/**
 * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
 * symmetric keys. This operation requires the keys/sign permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
 * signature from the digest. NOTE(review): the original list of supported algorithm constants was garbled by
 * extraction; restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
 * has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param digest The content from which signature is to be created.
 * @return A {@link Mono} containing a {@link SignResult} that holds the created signature.
 * @throws ResourceNotFoundException if the key cannot be found for signing.
 * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code digest} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> sign(algorithm, digest, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the sign operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: sign service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.sign(algorithm, digest, context);
        }
        // Honor the key's key_ops list before signing locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
            return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key "
            + "with id %s", key.getId()))));
    });
}
/**
 * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
 * keys. In case of asymmetric keys the public portion of the key is used to verify the signature. This operation
 * requires the keys/verify permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
 * signature. NOTE(review): the original list of supported algorithm constants was garbled by extraction; restore
 * it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
 * verification details when a response has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param digest The content from which signature is to be created.
 * @param signature The signature to be verified.
 * @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result.
 * @throws ResourceNotFoundException if the key cannot be found for verifying.
 * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> verify(algorithm, digest, signature, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the verify operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: verify service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.verify(algorithm, digest, signature, context);
        }
        // Honor the key's key_ops list before verifying locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
            return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for "
            + "key with id %s", key.getId()))));
    });
}
/**
 * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
 * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission.
 *
 * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
 * key content. NOTE(review): the original list of supported algorithm constants was garbled by extraction;
 * restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
 * response has been received.</p>
 *
 * @param algorithm The encryption algorithm to use for wrapping the key.
 * @param key The key content to be wrapped.
 * @return A {@link Mono} containing a {@link WrapResult} that holds the wrapped key.
 * @throws ResourceNotFoundException if the key cannot be found for wrap operation.
 * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code key} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> wrapKey(algorithm, key, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the wrap operation: prefers the locally cached key when usable
 * and permitted, otherwise delegates to the Key Vault service.
 */
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: wrap service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.wrapKey(algorithm, key, context);
        }
        // Honor the key's key_ops list before wrapping locally. (The 'key' parameter shadows
        // nothing: the cached key is always accessed via 'this.key' here.)
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) {
            return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for "
            + "key with id %s", this.key.getId()))));
    });
}
/**
 * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
 * is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap.
 * This operation requires the keys/unwrapKey permission.
 *
 * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
 * specified encrypted key content. NOTE(review): the original list of supported algorithm constants was garbled
 * by extraction; restore it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
 * a response has been received.</p>
 *
 * @param algorithm The encryption algorithm to use for wrapping the key.
 * @param encryptedKey The encrypted key content to unwrap.
 * @return A {@link Mono} containing the unwrapped key content.
 * @throws ResourceNotFoundException if the key cannot be found for wrap operation.
 * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the unwrap operation: prefers the locally cached key when usable
 * and permitted (UNWRAP_KEY), otherwise delegates to the Key Vault service.
 */
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
    Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
    Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: unwrap service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
        }
        // Honor the key's key_ops list before unwrapping locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) {
            return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed "
            + "for key with id %s", this.key.getId()))));
    });
}
/**
 * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
 * and symmetric keys. This operation requires the keys/sign permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
 * NOTE(review): the original list of supported algorithm constants was garbled by extraction; restore it from the
 * Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response
 * has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The content from which signature is to be created.
 * @return A {@link Mono} containing a {@link SignResult} that holds the created signature.
 * @throws ResourceNotFoundException if the key cannot be found for signing.
 * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm} or {@code data} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> signData(algorithm, data, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
 * Context-aware core of the sign-data operation: prefers the locally cached key when
 * usable and permitted, otherwise delegates to the Key Vault service.
 */
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        // No usable local key material: sign service-side.
        if (!localKeyUsable) {
            return cryptographyServiceClient.signData(algorithm, data, context);
        }
        // Honor the key's key_ops list before signing locally.
        if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
            return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
        }
        return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key "
            + "with id %s", this.key.getId()))));
    });
}
/**
 * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
 * keys and asymmetric keys. In case of asymmetric keys the public portion of the key is used to verify the
 * signature. This operation requires the keys/verify permission.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
 * signature. NOTE(review): the original list of supported algorithm constants was garbled by extraction; restore
 * it from the Key Vault SDK reference documentation.</p>
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
 * verification details when a response has been received.</p>
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The raw content against which signature is to be verified.
 * @param signature The signature to be verified.
 * @return The {@link Boolean} indicating the signature verification result.
 * @throws ResourceNotFoundException if the key cannot be found for verifying.
 * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
 * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
try {
// Capture the Reactor subscriber context, then delegate to the context-aware overload.
return withContext(context -> verifyData(algorithm, data, signature, context));
} catch (RuntimeException ex) {
// Report assembly-time failures through the returned Mono instead of throwing.
return monoError(logger, ex);
}
}
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(data, "Data cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify Operation is not allowed for key with id %s", this.key.getId()))));
}
return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
});
}
private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid"));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid"));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid"));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e));
}
}
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
return operations.contains(keyOperation);
}
    // Package-private accessor for the service-backed crypto client (used by sibling classes/tests).
    CryptographyServiceClient getCryptographyServiceClient() {
        return cryptographyServiceClient;
    }
    // Package-private mutator for the service-backed crypto client — presumably for test injection;
    // NOTE(review): no null check is performed on the replacement client.
    void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
        this.cryptographyServiceClient = serviceClient;
    }
} |